Report generated on 10-Nov-2020 at 01:04:19 by pytest-html v2.1.1
389-ds-base | 2.0.1-20201110gitca8ac8e.fc32 |
Packages | {"pluggy": "0.13.1", "py": "1.9.0", "pytest": "5.4.3"} |
Platform | Linux-5.7.7-200.fc32.x86_64-x86_64-with-glibc2.2.5 |
Plugins | {"html": "2.1.1", "libfaketime": "0.1.2", "metadata": "1.10.0"} |
Python | 3.8.6 |
cyrus-sasl | 2.1.27-4.fc32 |
nspr | 4.29.0-1.fc32 |
nss | 3.57.0-1.fc32 |
openldap | 2.4.47-5.fc32 |
2068 tests ran in 20444.76 seconds.
(Un)check the boxes to filter the results.
1975 passed, 21 skipped, 65 failed, 4 errors, 20 expected failures, 8 unexpected passes

Result | Test | Duration | Links |
---|---|---|---|
No results found. Try to check the filters | |||
Error | tickets/ticket48973_test.py::test_ticket48973_init::setup | 1.84 | |
request = <SubRequest 'topology' for <Function test_ticket48973_init>> @pytest.fixture(scope="module") def topology(request): # Creating standalone instance ... standalone = DirSrv(verbose=False) args_instance[SER_HOST] = HOST_STANDALONE args_instance[SER_PORT] = PORT_STANDALONE args_instance[SER_SERVERID_PROP] = SERVERID_STANDALONE args_instance[SER_CREATION_SUFFIX] = DEFAULT_SUFFIX args_standalone = args_instance.copy() standalone.allocate(args_standalone) instance_standalone = standalone.exists() if instance_standalone: standalone.delete() > standalone.create() /export/tests/tickets/ticket48973_test.py:52: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/local/lib/python3.8/site-packages/lib389/__init__.py:838: in create self._createDirsrv(version) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:808: in _createDirsrv sds.create_from_args(general, slapd, backends, None) /usr/local/lib/python3.8/site-packages/lib389/instance/setup.py:663: in create_from_args self._prepare_ds(general, slapd, backends) /usr/local/lib/python3.8/site-packages/lib389/instance/setup.py:594: in _prepare_ds assert_c(slapd['root_dn'] is not None, "Configuration root_dn in section [slapd] not found") _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ condition = False, msg = 'Configuration root_dn in section [slapd] not found' def assert_c(condition, msg="Assertion Failed"): """This is the same as assert, but assert is compiled out when optimisation is enabled. This prevents compiling out. """ if not condition: > raise AssertionError(msg) E AssertionError: Configuration root_dn in section [slapd] not found /usr/local/lib/python3.8/site-packages/lib389/utils.py:1243: AssertionError -------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... | |||
Error | tickets/ticket48973_test.py::test_ticket48973_ces_not_indexed::setup | 0.00 | |
request = <SubRequest 'topology' for <Function test_ticket48973_init>> @pytest.fixture(scope="module") def topology(request): # Creating standalone instance ... standalone = DirSrv(verbose=False) args_instance[SER_HOST] = HOST_STANDALONE args_instance[SER_PORT] = PORT_STANDALONE args_instance[SER_SERVERID_PROP] = SERVERID_STANDALONE args_instance[SER_CREATION_SUFFIX] = DEFAULT_SUFFIX args_standalone = args_instance.copy() standalone.allocate(args_standalone) instance_standalone = standalone.exists() if instance_standalone: standalone.delete() > standalone.create() /export/tests/tickets/ticket48973_test.py:52: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/local/lib/python3.8/site-packages/lib389/__init__.py:838: in create self._createDirsrv(version) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:808: in _createDirsrv sds.create_from_args(general, slapd, backends, None) /usr/local/lib/python3.8/site-packages/lib389/instance/setup.py:663: in create_from_args self._prepare_ds(general, slapd, backends) /usr/local/lib/python3.8/site-packages/lib389/instance/setup.py:594: in _prepare_ds assert_c(slapd['root_dn'] is not None, "Configuration root_dn in section [slapd] not found") _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ condition = False, msg = 'Configuration root_dn in section [slapd] not found' def assert_c(condition, msg="Assertion Failed"): """This is the same as assert, but assert is compiled out when optimisation is enabled. This prevents compiling out. """ if not condition: > raise AssertionError(msg) E AssertionError: Configuration root_dn in section [slapd] not found /usr/local/lib/python3.8/site-packages/lib389/utils.py:1243: AssertionError | |||
Error | tickets/ticket48973_test.py::test_ticket48973_homeDirectory_indexing::setup | 0.00 | |
request = <SubRequest 'topology' for <Function test_ticket48973_init>> @pytest.fixture(scope="module") def topology(request): # Creating standalone instance ... standalone = DirSrv(verbose=False) args_instance[SER_HOST] = HOST_STANDALONE args_instance[SER_PORT] = PORT_STANDALONE args_instance[SER_SERVERID_PROP] = SERVERID_STANDALONE args_instance[SER_CREATION_SUFFIX] = DEFAULT_SUFFIX args_standalone = args_instance.copy() standalone.allocate(args_standalone) instance_standalone = standalone.exists() if instance_standalone: standalone.delete() > standalone.create() /export/tests/tickets/ticket48973_test.py:52: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/local/lib/python3.8/site-packages/lib389/__init__.py:838: in create self._createDirsrv(version) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:808: in _createDirsrv sds.create_from_args(general, slapd, backends, None) /usr/local/lib/python3.8/site-packages/lib389/instance/setup.py:663: in create_from_args self._prepare_ds(general, slapd, backends) /usr/local/lib/python3.8/site-packages/lib389/instance/setup.py:594: in _prepare_ds assert_c(slapd['root_dn'] is not None, "Configuration root_dn in section [slapd] not found") _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ condition = False, msg = 'Configuration root_dn in section [slapd] not found' def assert_c(condition, msg="Assertion Failed"): """This is the same as assert, but assert is compiled out when optimisation is enabled. This prevents compiling out. """ if not condition: > raise AssertionError(msg) E AssertionError: Configuration root_dn in section [slapd] not found /usr/local/lib/python3.8/site-packages/lib389/utils.py:1243: AssertionError | |||
Error | tickets/ticket48973_test.py::test_ticket48973_homeDirectory_caseExactIA5Match_caseIgnoreIA5Match_indexing::setup | 0.00 | |
request = <SubRequest 'topology' for <Function test_ticket48973_init>> @pytest.fixture(scope="module") def topology(request): # Creating standalone instance ... standalone = DirSrv(verbose=False) args_instance[SER_HOST] = HOST_STANDALONE args_instance[SER_PORT] = PORT_STANDALONE args_instance[SER_SERVERID_PROP] = SERVERID_STANDALONE args_instance[SER_CREATION_SUFFIX] = DEFAULT_SUFFIX args_standalone = args_instance.copy() standalone.allocate(args_standalone) instance_standalone = standalone.exists() if instance_standalone: standalone.delete() > standalone.create() /export/tests/tickets/ticket48973_test.py:52: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/local/lib/python3.8/site-packages/lib389/__init__.py:838: in create self._createDirsrv(version) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:808: in _createDirsrv sds.create_from_args(general, slapd, backends, None) /usr/local/lib/python3.8/site-packages/lib389/instance/setup.py:663: in create_from_args self._prepare_ds(general, slapd, backends) /usr/local/lib/python3.8/site-packages/lib389/instance/setup.py:594: in _prepare_ds assert_c(slapd['root_dn'] is not None, "Configuration root_dn in section [slapd] not found") _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ condition = False, msg = 'Configuration root_dn in section [slapd] not found' def assert_c(condition, msg="Assertion Failed"): """This is the same as assert, but assert is compiled out when optimisation is enabled. This prevents compiling out. """ if not condition: > raise AssertionError(msg) E AssertionError: Configuration root_dn in section [slapd] not found /usr/local/lib/python3.8/site-packages/lib389/utils.py:1243: AssertionError | |||
Failed | suites/acl/keywords_part2_test.py::test_access_from_certain_network_only_ip | 3.85 | |
topo = <lib389.topologies.TopologyMain object at 0x7faa5a044820> add_user = None, aci_of_user = None def test_access_from_certain_network_only_ip(topo, add_user, aci_of_user): """ User can access the data when connecting from certain network only as per the ACI. :id: 4ec38296-7ac5-11e8-9816-8c16451d917b :setup: Standalone Server :steps: 1. Add test entry 2. Add ACI 3. User should follow ACI role :expectedresults: 1. Entry should be added 2. Operation should succeed 3. Operation should succeed """ # Turn access log buffering off to make less time consuming topo.standalone.config.set('nsslapd-accesslog-logbuffering', 'off') # Find the ip from ds logs , as we need to know the exact ip used by ds to run the instances. # Wait till Access Log is generated topo.standalone.restart() # Add ACI domain = Domain(topo.standalone, DEFAULT_SUFFIX) domain.add("aci", f'(target = "ldap:///{IP_OU_KEY}")(targetattr=\"*\")(version 3.0; aci "IP aci"; ' f'allow(all)userdn = "ldap:///{NETSCAPEIP_KEY}" and ip = "::1" ;)') # create a new connection for the test conn = UserAccount(topo.standalone, NETSCAPEIP_KEY).bind(PW_DM) # Perform Operation org = OrganizationalUnit(conn, IP_OU_KEY) > org.replace("seeAlso", "cn=1") suites/acl/keywords_part2_test.py:76: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/local/lib/python3.8/site-packages/lib389/_mapped_object.py:280: in replace self.set(key, value, action=ldap.MOD_REPLACE) /usr/local/lib/python3.8/site-packages/lib389/_mapped_object.py:446: in set return self._instance.modify_ext_s(self._dn, [(action, key, value)], /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:613: in modify_ext_s resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) 
/usr/lib64/python3.8/site-packages/ldap/ldapobject.py:764: in result3 resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4( /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:774: in result4 ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:340: in _ldap_call reraise(exc_type, exc_value, exc_traceback) /usr/lib64/python3.8/site-packages/ldap/compat.py:46: in reraise raise exc_value _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa5a5a4dc0> func = <built-in method result4 of LDAP object at 0x7faa5a051a80> args = (3, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E ldap.INSUFFICIENT_ACCESS: {'msgtype': 103, 'msgid': 3, 'result': 50, 'desc': 'Insufficient access', 'ctrls': [], 'info': "Insufficient 'write' privilege to the 'seeAlso' attribute of entry 'ou=ip,ou=keywords,dc=example,dc=com'.\n"} /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:324: INSUFFICIENT_ACCESS -------------------------------Captured log setup------------------------------- [32mINFO [0m 
lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Failed | suites/acl/keywords_part2_test.py::test_connectin_from_an_unauthorized_network | 0.11 | |
topo = <lib389.topologies.TopologyMain object at 0x7faa5a044820> add_user = None, aci_of_user = None def test_connectin_from_an_unauthorized_network(topo, add_user, aci_of_user): """ User cannot access the data when connectin from an unauthorized network as per the ACI. :id: 52d1ecce-7ac5-11e8-9ad9-8c16451d917b :setup: Standalone Server :steps: 1. Add test entry 2. Add ACI 3. User should follow ACI role :expectedresults: 1. Entry should be added 2. Operation should succeed 3. Operation should succeed """ # Add ACI domain = Domain(topo.standalone, DEFAULT_SUFFIX) domain.add("aci", f'(target = "ldap:///{IP_OU_KEY}")' f'(targetattr="*")(version 3.0; aci "IP aci"; ' f'allow(all) userdn = "ldap:///{NETSCAPEIP_KEY}" ' f'and ip != "::1" ;)') # create a new connection for the test conn = UserAccount(topo.standalone, NETSCAPEIP_KEY).bind(PW_DM) # Perform Operation org = OrganizationalUnit(conn, IP_OU_KEY) with pytest.raises(ldap.INSUFFICIENT_ACCESS): > org.replace("seeAlso", "cn=1") E Failed: DID NOT RAISE <class 'ldap.INSUFFICIENT_ACCESS'> suites/acl/keywords_part2_test.py:119: Failed | |||
Failed | suites/clu/repl_monitor_test.py::test_dsconf_replication_monitor | 0.65 | |
topology_m2 = <lib389.topologies.TopologyMain object at 0x7faa58cd7f40> set_log_file = None @pytest.mark.ds50545 @pytest.mark.bz1739718 @pytest.mark.skipif(ds_is_older("1.4.0"), reason="Not implemented") def test_dsconf_replication_monitor(topology_m2, set_log_file): """Test replication monitor that was ported from legacy tools :id: ce48020d-7c30-41b7-8f68-144c9cd757f6 :setup: 2 MM topology :steps: 1. Create DS instance 2. Run replication monitor with connections option 3. Run replication monitor with aliases option 4. Run replication monitor with --json option 5. Run replication monitor with .dsrc file created :expectedresults: 1. Success 2. Success 3. Success 4. Success 5. Success """ m1 = topology_m2.ms["master1"] m2 = topology_m2.ms["master2"] alias_content = ['Supplier: M1 (' + m1.host + ':' + str(m1.port) + ')', 'Supplier: M2 (' + m2.host + ':' + str(m2.port) + ')'] connection_content = 'Supplier: '+ m1.host + ':' + str(m1.port) content_list = ['Replica Root: dc=example,dc=com', 'Replica ID: 1', 'Replica Status: Available', 'Max CSN', 'Status For Agreement: "002" ('+ m2.host + ':' + str(m2.port) + ')', 'Replica Enabled: on', 'Update In Progress: FALSE', 'Last Update Start:', 'Last Update End:', 'Number Of Changes Sent:', 'Number Of Changes Skipped: None', 'Last Update Status: Error (0) Replica acquired successfully: Incremental update succeeded', 'Last Init Start:', 'Last Init End:', 'Last Init Status:', 'Reap Active: 0', 'Replication Status: In Synchronization', 'Replication Lag Time:', 'Supplier: ', m2.host + ':' + str(m2.port), 'Replica Root: dc=example,dc=com', 'Replica ID: 2', 'Status For Agreement: "001" (' + m1.host + ':' + str(m1.port)+')'] json_list = ['type', 'list', 'items', 'name', m1.host + ':' + str(m1.port), 'data', '"replica_id": "1"', '"replica_root": "dc=example,dc=com"', '"replica_status": "Available"', 'maxcsn', 'agmts_status', 'agmt-name', '002', 'replica', m2.host + ':' + str(m2.port), 'replica-enabled', 'update-in-progress', 
'last-update-start', 'last-update-end', 'number-changes-sent', 'number-changes-skipped', 'last-update-status', 'Error (0) Replica acquired successfully: Incremental update succeeded', 'last-init-start', 'last-init-end', 'last-init-status', 'reap-active', 'replication-status', 'In Synchronization', 'replication-lag-time', '"replica_id": "2"', '001', m1.host + ':' + str(m1.port)] dsrc_content = '[repl-monitor-connections]\n' \ 'connection1 = ' + m1.host + ':' + str(m1.port) + ':' + DN_DM + ':' + PW_DM + '\n' \ 'connection2 = ' + m2.host + ':' + str(m2.port) + ':' + DN_DM + ':' + PW_DM + '\n' \ '\n' \ '[repl-monitor-aliases]\n' \ 'M1 = ' + m1.host + ':' + str(m1.port) + '\n' \ 'M2 = ' + m2.host + ':' + str(m2.port) connections = [m1.host + ':' + str(m1.port) + ':' + DN_DM + ':' + PW_DM, m2.host + ':' + str(m2.port) + ':' + DN_DM + ':' + PW_DM] aliases = ['M1=' + m1.host + ':' + str(m1.port), 'M2=' + m2.host + ':' + str(m2.port)] args = FakeArgs() args.connections = connections args.aliases = None args.json = False log.info('Run replication monitor with connections option') get_repl_monitor_info(m1, DEFAULT_SUFFIX, log, args) check_value_in_log_and_reset(content_list, connection_content) log.info('Run replication monitor with aliases option') args.aliases = aliases get_repl_monitor_info(m1, DEFAULT_SUFFIX, log, args) > check_value_in_log_and_reset(content_list, alias_content) suites/clu/repl_monitor_test.py:177: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ content_list = ['Replica Root: dc=example,dc=com', 'Replica ID: 1', 'Replica Status: Available', 'Max CSN', 'Status For Agreement: "002" (ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002)', 'Replica Enabled: on', ...] 
second_list = ['Supplier: M1 (ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001)', 'Supplier: M2 (ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002)'] single_value = None def check_value_in_log_and_reset(content_list, second_list=None, single_value=None): with open(LOG_FILE, 'r+') as f: file_content = f.read() for item in content_list: log.info('Check that "{}" is present'.format(item)) assert item in file_content if second_list is not None: log.info('Check for "{}"'.format(second_list)) for item in second_list: > assert item in file_content E AssertionError: assert 'Supplier: M1 (ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001)' in 'Run replication monitor with aliases option\ndsrc path: /root/.dsrc\ndsrc container path: /data/config/container.inf\...t Init Status: unavailable\nReap Active: 0\nReplication Status: In Synchronization\nReplication Lag Time: 00:00:00\n\n' suites/clu/repl_monitor_test.py:54: AssertionError -------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 2d5d1be9-62dd-495e-9db3-956caba6e319 / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 9ab0770d-a607-420a-9a20-93d1232c79ac / got description=2d5d1be9-62dd-495e-9db3-956caba6e319) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... 
[32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.clu.repl_monitor_test:repl_monitor_test.py:170 Run replication monitor with connections option [35mDEBUG [0m tests.suites.clu.repl_monitor_test:dsrc.py:76 dsrc path: /root/.dsrc [35mDEBUG [0m tests.suites.clu.repl_monitor_test:dsrc.py:77 dsrc container path: /data/config/container.inf [35mDEBUG [0m tests.suites.clu.repl_monitor_test:dsrc.py:85 dsrc instances: [] [35mDEBUG [0m tests.suites.clu.repl_monitor_test:dsrc.py:210 dsrc completed with {'connections': None, 'aliases': None} [32mINFO [0m tests.suites.clu.repl_monitor_test:replication.py:438 Supplier: localhost.localdomain:39001 [32mINFO [0m tests.suites.clu.repl_monitor_test:replication.py:443 ------------------------------------- [32mINFO [0m tests.suites.clu.repl_monitor_test:replication.py:455 Replica Root: dc=example,dc=com [32mINFO [0m tests.suites.clu.repl_monitor_test:replication.py:456 Replica ID: 1 [32mINFO [0m tests.suites.clu.repl_monitor_test:replication.py:457 Replica Status: Available [32mINFO [0m tests.suites.clu.repl_monitor_test:replication.py:458 Max CSN: 5fa9e473000000010000 [32mINFO [0m tests.suites.clu.repl_monitor_test:replication.py:461 Status For Agreement: "002" (ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002) Replica Enabled: on Update In Progress: FALSE Last Update Start: 20201110005308Z Last Update End: 20201110005308Z Number Of Changes Sent: 1:2/0 Number Of Changes 
Skipped: None Last Update Status: Error (0) Replica acquired successfully: Incremental update succeeded Last Init Start: 19700101000000Z Last Init End: 19700101000000Z Last Init Status: unavailable Reap Active: 0 Replication Status: In Synchronization Replication Lag Time: 00:00:00 [32mINFO [0m tests.suites.clu.repl_monitor_test:replication.py:438 Supplier: ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m tests.suites.clu.repl_monitor_test:replication.py:443 ---------------------------------------------------------------- [32mINFO [0m tests.suites.clu.repl_monitor_test:replication.py:455 Replica Root: dc=example,dc=com [32mINFO [0m tests.suites.clu.repl_monitor_test:replication.py:456 Replica ID: 2 [32mINFO [0m tests.suites.clu.repl_monitor_test:replication.py:457 Replica Status: Available [32mINFO [0m tests.suites.clu.repl_monitor_test:replication.py:458 Max CSN: 5fa9e474000000020000 [32mINFO [0m tests.suites.clu.repl_monitor_test:replication.py:461 Status For Agreement: "001" (ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001) Replica Enabled: on Update In Progress: FALSE Last Update Start: 20201110005308Z Last Update End: 20201110005308Z Number Of Changes Sent: 2:2/0 Number Of Changes Skipped: None Last Update Status: Error (0) Replica acquired successfully: Incremental update succeeded Last Init Start: 19700101000000Z Last Init End: 19700101000000Z Last Init Status: unavailable Reap Active: 0 Replication Status: In Synchronization Replication Lag Time: 00:00:00 [32mINFO [0m tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Replica Root: dc=example,dc=com" is present [32mINFO [0m tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Replica ID: 1" is present [32mINFO [0m tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Replica Status: Available" is present [32mINFO [0m tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Max CSN" is present [32mINFO [0m 
tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Status For Agreement: "002" (ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002)" is present [32mINFO [0m tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Replica Enabled: on" is present [32mINFO [0m tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Update In Progress: FALSE" is present [32mINFO [0m tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Last Update Start:" is present [32mINFO [0m tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Last Update End:" is present [32mINFO [0m tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Number Of Changes Sent:" is present [32mINFO [0m tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Number Of Changes Skipped: None" is present [32mINFO [0m tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Last Update Status: Error (0) Replica acquired successfully: Incremental update succeeded" is present [32mINFO [0m tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Last Init Start:" is present [32mINFO [0m tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Last Init End:" is present [32mINFO [0m tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Last Init Status:" is present [32mINFO [0m tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Reap Active: 0" is present [32mINFO [0m tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Replication Status: In Synchronization" is present [32mINFO [0m tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Replication Lag Time:" is present [32mINFO [0m tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Supplier: " is present [32mINFO [0m tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that 
"ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002" is present [32mINFO [0m tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Replica Root: dc=example,dc=com" is present [32mINFO [0m tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Replica ID: 2" is present [32mINFO [0m tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Status For Agreement: "001" (ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001)" is present [32mINFO [0m tests.suites.clu.repl_monitor_test:repl_monitor_test.py:52 Check for "Supplier: ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001" [32mINFO [0m tests.suites.clu.repl_monitor_test:repl_monitor_test.py:60 Reset log file [32mINFO [0m tests.suites.clu.repl_monitor_test:repl_monitor_test.py:174 Run replication monitor with aliases option [35mDEBUG [0m tests.suites.clu.repl_monitor_test:dsrc.py:76 dsrc path: /root/.dsrc [35mDEBUG [0m tests.suites.clu.repl_monitor_test:dsrc.py:77 dsrc container path: /data/config/container.inf [35mDEBUG [0m tests.suites.clu.repl_monitor_test:dsrc.py:85 dsrc instances: [] [35mDEBUG [0m tests.suites.clu.repl_monitor_test:dsrc.py:210 dsrc completed with {'connections': None, 'aliases': None} [32mINFO [0m tests.suites.clu.repl_monitor_test:replication.py:438 Supplier: localhost.localdomain:39001 [32mINFO [0m tests.suites.clu.repl_monitor_test:replication.py:443 ------------------------------------- [32mINFO [0m tests.suites.clu.repl_monitor_test:replication.py:455 Replica Root: dc=example,dc=com [32mINFO [0m tests.suites.clu.repl_monitor_test:replication.py:456 Replica ID: 1 [32mINFO [0m tests.suites.clu.repl_monitor_test:replication.py:457 Replica Status: Available [32mINFO [0m tests.suites.clu.repl_monitor_test:replication.py:458 Max CSN: 5fa9e473000000010000 [32mINFO [0m tests.suites.clu.repl_monitor_test:replication.py:461 Status For Agreement: "002" (ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002) Replica Enabled: on Update In 
Progress: FALSE Last Update Start: 20201110005308Z Last Update End: 20201110005308Z Number Of Changes Sent: 1:2/0 Number Of Changes Skipped: None Last Update Status: Error (0) Replica acquired successfully: Incremental update succeeded Last Init Start: 19700101000000Z Last Init End: 19700101000000Z Last Init Status: unavailable Reap Active: 0 Replication Status: In Synchronization Replication Lag Time: 00:00:00 [32mINFO [0m tests.suites.clu.repl_monitor_test:replication.py:438 Supplier: M2 (ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002) [32mINFO [0m tests.suites.clu.repl_monitor_test:replication.py:443 --------------------------------------------------------------------- [32mINFO [0m tests.suites.clu.repl_monitor_test:replication.py:455 Replica Root: dc=example,dc=com [32mINFO [0m tests.suites.clu.repl_monitor_test:replication.py:456 Replica ID: 2 [32mINFO [0m tests.suites.clu.repl_monitor_test:replication.py:457 Replica Status: Available [32mINFO [0m tests.suites.clu.repl_monitor_test:replication.py:458 Max CSN: 5fa9e474000000020000 [32mINFO [0m tests.suites.clu.repl_monitor_test:replication.py:461 Status For Agreement: "001" (ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001) Replica Enabled: on Update In Progress: FALSE Last Update Start: 20201110005308Z Last Update End: 20201110005308Z Number Of Changes Sent: 2:2/0 Number Of Changes Skipped: None Last Update Status: Error (0) Replica acquired successfully: Incremental update succeeded Last Init Start: 19700101000000Z Last Init End: 19700101000000Z Last Init Status: unavailable Reap Active: 0 Replication Status: In Synchronization Replication Lag Time: 00:00:00 [32mINFO [0m tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Replica Root: dc=example,dc=com" is present [32mINFO [0m tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Replica ID: 1" is present [32mINFO [0m tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Replica Status: 
Available" is present [32mINFO [0m tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Max CSN" is present [32mINFO [0m tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Status For Agreement: "002" (ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002)" is present [32mINFO [0m tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Replica Enabled: on" is present [32mINFO [0m tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Update In Progress: FALSE" is present [32mINFO [0m tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Last Update Start:" is present [32mINFO [0m tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Last Update End:" is present [32mINFO [0m tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Number Of Changes Sent:" is present [32mINFO [0m tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Number Of Changes Skipped: None" is present [32mINFO [0m tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Last Update Status: Error (0) Replica acquired successfully: Incremental update succeeded" is present [32mINFO [0m tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Last Init Start:" is present [32mINFO [0m tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Last Init End:" is present [32mINFO [0m tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Last Init Status:" is present [32mINFO [0m tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Reap Active: 0" is present [32mINFO [0m tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Replication Status: In Synchronization" is present [32mINFO [0m tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Replication Lag Time:" is present [32mINFO [0m tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that 
"Supplier: " is present [32mINFO [0m tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002" is present [32mINFO [0m tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Replica Root: dc=example,dc=com" is present [32mINFO [0m tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Replica ID: 2" is present [32mINFO [0m tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Status For Agreement: "001" (ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001)" is present [32mINFO [0m tests.suites.clu.repl_monitor_test:repl_monitor_test.py:52 Check for "['Supplier: M1 (ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001)', 'Supplier: M2 (ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002)']" | |||
Failed | suites/clu/schema_test.py::test_origins_with_extra_parenthesis | 0.19 | |
topo = <lib389.topologies.TopologyMain object at 0x7faa59e3c0a0> def test_origins_with_extra_parenthesis(topo): """Test the custom schema with extra parenthesis in X-ORIGIN can be parsed into JSON :id: 4230f83b-0dc3-4bc4-a7a8-5ab0826a4f05 :setup: Standalone Instance :steps: 1. Add attribute with X-ORIGIN that contains extra parenthesis 2. Querying for that attribute with JSON flag :expectedresults: 1. Success 2. Success """ ATTR_NAME = 'testAttribute' X_ORG_VAL = 'test (TEST)' schema = Schema(topo.standalone) # Add new attribute parameters = { 'names': [ATTR_NAME], 'oid': '1.1.1.1.1.1.1.22222', 'desc': 'Test extra parenthesis in X-ORIGIN', 'x_origin': X_ORG_VAL, 'syntax': '1.3.6.1.4.1.1466.115.121.1.15', 'syntax_len': None, 'x_ordered': None, 'collective': None, 'obsolete': None, 'single_value': None, 'no_user_mod': None, 'equality': None, 'substr': None, 'ordering': None, 'usage': None, 'sup': None } > schema.add_attributetype(parameters) suites/clu/schema_test.py:47: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/local/lib/python3.8/site-packages/lib389/schema.py:321: in add_attributetype return self._add_schema_object(parameters, AttributeType) /usr/local/lib/python3.8/site-packages/lib389/schema.py:215: in _add_schema_object return self.add(attr_name, str(schema_object)) /usr/lib64/python3.8/site-packages/ldap/schema/models.py:320: in __str__ result.append(self.key_list('X-ORIGIN',self.x_origin,quoted=1)) _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <ldap.schema.models.AttributeType object at 0x7faa59f78700> key = 'X-ORIGIN', values = 'test (TEST)', sep = ' ', quoted = 1 def key_list(self,key,values,sep=' ',quoted=0): > assert type(values)==tuple,TypeError("values has to be a tuple, was %r" % values) E AssertionError: values has to be a tuple, was 'test (TEST)' /usr/lib64/python3.8/site-packages/ldap/schema/models.py:79: AssertionError -------------------------------Captured log 
setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Failed | suites/fourwaymmr/fourwaymmr_test.py::test_replica_backup_and_restore | 49.29 | |
topo_m4 = <lib389.topologies.TopologyMain object at 0x7faa56349850> @pytest.mark.bz830335 def test_replica_backup_and_restore(topo_m4): """Test Backup and restore :id: 5ad1b85c-e765-11e8-9668-8c16451d917b :setup: standalone :steps: 1. Add entries 2. Take backup db2ldif on master1 3. Delete entries on master1 4. Restore entries ldif2db 5. Check entries :expected results: 1. Should success 2. Should success 3. Should success 4. Should success 5. Should success """ # Testing bug #830335: Taking a replica backup and Restore on M1 after deleting few entries from M1 nad M2 repl = ReplicationManager(DEFAULT_SUFFIX) users = UserAccounts(topo_m4.ms["master3"], DEFAULT_SUFFIX) for i in range(20, 25): users.create_test_user(uid=i) time.sleep(1) repl.wait_for_replication(topo_m4.ms["master1"], topo_m4.ms["master2"]) repl.test_replication(topo_m4.ms["master1"], topo_m4.ms["master2"], 30) topo_m4.ms["master1"].stop() topo_m4.ms["master1"].db2ldif( bename=DEFAULT_BENAME, suffixes=[DEFAULT_SUFFIX], excludeSuffixes=[], encrypt=False, repl_data=True, outputfile="/tmp/output_file", ) topo_m4.ms["master1"].start() for i in users.list(): topo_m4.ms["master1"].delete_s(i.dn) repl.wait_for_replication(topo_m4.ms["master1"], topo_m4.ms["master2"]) repl.test_replication(topo_m4.ms["master1"], topo_m4.ms["master2"], 30) topo_m4.ms["master1"].stop() topo_m4.ms["master1"].ldif2db( bename=None, excludeSuffixes=None, encrypt=False, suffixes=[DEFAULT_SUFFIX], import_file="/tmp/output_file", ) topo_m4.ms["master1"].start() for i in range(20, 25): users.create_test_user(uid=i) time.sleep(1) > repl.wait_for_replication(topo_m4.ms["master1"], topo_m4.ms["master2"]) suites/fourwaymmr/fourwaymmr_test.py:471: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.replica.ReplicationManager object at 0x7faa5456c3d0> from_instance = <lib389.DirSrv object at 0x7faa5b552430> to_instance = <lib389.DirSrv object at 0x7faa555ef1f0>, timeout = 20 def 
wait_for_replication(self, from_instance, to_instance, timeout=20): """Wait for a replication event to occur from instance to instance. This shows some point of synchronisation has occured. :param from_instance: The instance whos state we we want to check from :type from_instance: lib389.DirSrv :param to_instance: The instance whos state we want to check matches from. :type to_instance: lib389.DirSrv :param timeout: Fail after timeout seconds. :type timeout: int """ # Touch something then wait_for_replication. from_groups = Groups(from_instance, basedn=self._suffix, rdn=None) to_groups = Groups(to_instance, basedn=self._suffix, rdn=None) from_group = from_groups.get('replication_managers') to_group = to_groups.get('replication_managers') change = str(uuid.uuid4()) from_group.replace('description', change) for i in range(0, timeout): desc = to_group.get_attr_val_utf8('description') if change == desc: self._log.info("SUCCESS: Replication from %s to %s is working" % (from_instance.ldapuri, to_instance.ldapuri)) return True self._log.info("Retry: Replication from %s to %s is NOT working (expect %s / got description=%s)" % (from_instance.ldapuri, to_instance.ldapuri, change, desc)) time.sleep(1) self._log.info("FAIL: Replication from %s to %s is NOT working (expect %s / got description=%s)" % (from_instance.ldapuri, to_instance.ldapuri, change, desc)) > raise Exception("Replication did not sync in time!") E Exception: Replication did not sync in time! 
/usr/local/lib/python3.8/site-packages/lib389/replica.py:2501: Exception ------------------------------Captured stderr call------------------------------ ldiffile: /tmp/output_file [09/Nov/2020:20:47:48.765099421 -0500] - INFO - slapd_exemode_ldif2db - Backend Instance: userRoot -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 3879a3c1-ab94-40e9-8f90-faf6a4be85b7 / got description=c3214a07-13ad-452b-884d-80f7d06f722b) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect acf92169-7e24-4cef-956c-a58471ee13d5 / got description=3879a3c1-ab94-40e9-8f90-faf6a4be85b7) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 43759809-7e35-41a8-b34f-d4dc2d100c39 / got description=acf92169-7e24-4cef-956c-a58471ee13d5) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from 
ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 1ea83ad4-2cee-46ed-ae1d-0f15ee236012 / got description=43759809-7e35-41a8-b34f-d4dc2d100c39) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect aa1c8e19-7a65-44a4-b2ab-9aeb2f897d48 / got description=1ea83ad4-2cee-46ed-ae1d-0f15ee236012) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect aa1c8e19-7a65-44a4-b2ab-9aeb2f897d48 / got description=1ea83ad4-2cee-46ed-ae1d-0f15ee236012) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect aa1c8e19-7a65-44a4-b2ab-9aeb2f897d48 / got description=1ea83ad4-2cee-46ed-ae1d-0f15ee236012) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect aa1c8e19-7a65-44a4-b2ab-9aeb2f897d48 / got description=1ea83ad4-2cee-46ed-ae1d-0f15ee236012) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect aa1c8e19-7a65-44a4-b2ab-9aeb2f897d48 / got 
description=1ea83ad4-2cee-46ed-ae1d-0f15ee236012) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect aa1c8e19-7a65-44a4-b2ab-9aeb2f897d48 / got description=1ea83ad4-2cee-46ed-ae1d-0f15ee236012) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect aa1c8e19-7a65-44a4-b2ab-9aeb2f897d48 / got description=1ea83ad4-2cee-46ed-ae1d-0f15ee236012) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect aa1c8e19-7a65-44a4-b2ab-9aeb2f897d48 / got description=1ea83ad4-2cee-46ed-ae1d-0f15ee236012) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect aa1c8e19-7a65-44a4-b2ab-9aeb2f897d48 / got description=1ea83ad4-2cee-46ed-ae1d-0f15ee236012) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect aa1c8e19-7a65-44a4-b2ab-9aeb2f897d48 / got description=1ea83ad4-2cee-46ed-ae1d-0f15ee236012) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect aa1c8e19-7a65-44a4-b2ab-9aeb2f897d48 / got description=1ea83ad4-2cee-46ed-ae1d-0f15ee236012) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from 
ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect aa1c8e19-7a65-44a4-b2ab-9aeb2f897d48 / got description=1ea83ad4-2cee-46ed-ae1d-0f15ee236012) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect aa1c8e19-7a65-44a4-b2ab-9aeb2f897d48 / got description=1ea83ad4-2cee-46ed-ae1d-0f15ee236012) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect aa1c8e19-7a65-44a4-b2ab-9aeb2f897d48 / got description=1ea83ad4-2cee-46ed-ae1d-0f15ee236012) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect aa1c8e19-7a65-44a4-b2ab-9aeb2f897d48 / got description=1ea83ad4-2cee-46ed-ae1d-0f15ee236012) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect aa1c8e19-7a65-44a4-b2ab-9aeb2f897d48 / got description=1ea83ad4-2cee-46ed-ae1d-0f15ee236012) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect aa1c8e19-7a65-44a4-b2ab-9aeb2f897d48 / got description=1ea83ad4-2cee-46ed-ae1d-0f15ee236012) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT 
working (expect aa1c8e19-7a65-44a4-b2ab-9aeb2f897d48 / got description=1ea83ad4-2cee-46ed-ae1d-0f15ee236012) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect aa1c8e19-7a65-44a4-b2ab-9aeb2f897d48 / got description=1ea83ad4-2cee-46ed-ae1d-0f15ee236012) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect aa1c8e19-7a65-44a4-b2ab-9aeb2f897d48 / got description=1ea83ad4-2cee-46ed-ae1d-0f15ee236012) [32mINFO [0m lib389.replica:replica.py:2500 FAIL: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect aa1c8e19-7a65-44a4-b2ab-9aeb2f897d48 / got description=1ea83ad4-2cee-46ed-ae1d-0f15ee236012) | |||
Failed | suites/gssapi/simple_gssapi_test.py::test_gssapi_bind | 0.32 | |
topology_st_gssapi = <lib389.topologies.TopologyMain object at 0x7faa45cbf520> testuser = <lib389.idm.user.UserAccount object at 0x7faa45dabc70> @gssapi_ack def test_gssapi_bind(topology_st_gssapi, testuser): """Test that we can bind with GSSAPI :id: 894a4c27-3d4c-4ba3-aa33-2910032e3783 :setup: standalone gssapi instance :steps: 1. Bind with sasl/gssapi :expectedresults: 1. Bind succeeds """ > conn = testuser.bind_gssapi() suites/gssapi/simple_gssapi_test.py:53: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/local/lib/python3.8/site-packages/lib389/idm/account.py:258: in bind_gssapi inst_clone.open(saslmethod='gssapi') /usr/local/lib/python3.8/site-packages/lib389/__init__.py:995: in open self.sasl_interactive_bind_s("", sasl_auth, escapehatch='i am sure') /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:476: in sasl_interactive_bind_s return self._ldap_call(self._l.sasl_interactive_bind_s,who,auth,RequestControlTuples(serverctrls),RequestControlTuples(clientctrls),sasl_flags) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:340: in _ldap_call reraise(exc_type, exc_value, exc_traceback) /usr/lib64/python3.8/site-packages/ldap/compat.py:46: in reraise raise exc_value _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa45dab910> func = <built-in method sasl_interactive_bind_s of LDAP object at 0x7faa45d6b7b0> args = ('', <ldap.sasl.gssapi object at 0x7faa45d6b430>, None, None, 2) kwargs = {}, diagnostic_message_success = None, exc_type = None exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if 
self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E ldap.INVALID_CREDENTIALS: {'result': 49, 'desc': 'Invalid credentials', 'ctrls': [], 'info': 'SASL(-1): generic failure: GSSAPI Error: An invalid name was supplied (Included profile file could not be read)'} /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:324: INVALID_CREDENTIALS -----------------------------Captured stdout setup------------------------------ Kerberos master password: KpJvI5bSgLw1vfZip2TEFy1qKxbctwjHJOjSUqowee.a0JmBet06ELUYNA16BF6Ab Loading random data Initializing database '/var/kerberos/krb5kdc/principal' for realm 'HOSTED.UPSHIFT.RDU2.REDHAT.COM', master key name 'K/M@HOSTED.UPSHIFT.RDU2.REDHAT.COM' Authenticating as principal root/admin@HOSTED.UPSHIFT.RDU2.REDHAT.COM with password. Principal "ldap/ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com@HOSTED.UPSHIFT.RDU2.REDHAT.COM" created. Authenticating as principal root/admin@HOSTED.UPSHIFT.RDU2.REDHAT.COM with password. K/M@HOSTED.UPSHIFT.RDU2.REDHAT.COM kadmin/admin@HOSTED.UPSHIFT.RDU2.REDHAT.COM kadmin/changepw@HOSTED.UPSHIFT.RDU2.REDHAT.COM kadmin/ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com@HOSTED.UPSHIFT.RDU2.REDHAT.COM kiprop/ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com@HOSTED.UPSHIFT.RDU2.REDHAT.COM krbtgt/HOSTED.UPSHIFT.RDU2.REDHAT.COM@HOSTED.UPSHIFT.RDU2.REDHAT.COM ldap/ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com@HOSTED.UPSHIFT.RDU2.REDHAT.COM Authenticating as principal root/admin@HOSTED.UPSHIFT.RDU2.REDHAT.COM with password. 
Entry for principal ldap/ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com@HOSTED.UPSHIFT.RDU2.REDHAT.COM with kvno 2, encryption type aes256-cts-hmac-sha1-96 added to keytab WRFILE:/etc/krb5.keytab. Entry for principal ldap/ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com@HOSTED.UPSHIFT.RDU2.REDHAT.COM with kvno 2, encryption type aes128-cts-hmac-sha1-96 added to keytab WRFILE:/etc/krb5.keytab. Authenticating as principal root/admin@HOSTED.UPSHIFT.RDU2.REDHAT.COM with password. Principal "testuser@HOSTED.UPSHIFT.RDU2.REDHAT.COM" created. Authenticating as principal root/admin@HOSTED.UPSHIFT.RDU2.REDHAT.COM with password. K/M@HOSTED.UPSHIFT.RDU2.REDHAT.COM kadmin/admin@HOSTED.UPSHIFT.RDU2.REDHAT.COM kadmin/changepw@HOSTED.UPSHIFT.RDU2.REDHAT.COM kadmin/ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com@HOSTED.UPSHIFT.RDU2.REDHAT.COM kiprop/ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com@HOSTED.UPSHIFT.RDU2.REDHAT.COM krbtgt/HOSTED.UPSHIFT.RDU2.REDHAT.COM@HOSTED.UPSHIFT.RDU2.REDHAT.COM ldap/ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com@HOSTED.UPSHIFT.RDU2.REDHAT.COM testuser@HOSTED.UPSHIFT.RDU2.REDHAT.COM Authenticating as principal root/admin@HOSTED.UPSHIFT.RDU2.REDHAT.COM with password. Entry for principal testuser@HOSTED.UPSHIFT.RDU2.REDHAT.COM with kvno 2, encryption type aes256-cts-hmac-sha1-96 added to keytab WRFILE:/tmp/testuser.keytab. Entry for principal testuser@HOSTED.UPSHIFT.RDU2.REDHAT.COM with kvno 2, encryption type aes128-cts-hmac-sha1-96 added to keytab WRFILE:/tmp/testuser.keytab. -----------------------------Captured stderr setup------------------------------ No policy specified for ldap/ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com@HOSTED.UPSHIFT.RDU2.REDHAT.COM; defaulting to no policy No policy specified for testuser@HOSTED.UPSHIFT.RDU2.REDHAT.COM; defaulting to no policy -------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... 
[32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Failed | suites/gssapi/simple_gssapi_test.py::test_support_mech | 0.37 | |
topology_st_gssapi = <lib389.topologies.TopologyMain object at 0x7faa45cbf520> testuser = <lib389.idm.user.UserAccount object at 0x7faa45dabc70> @gssapi_ack def test_support_mech(topology_st_gssapi, testuser): """Test allowed sasl mechs works when GSSAPI is allowed :id: 6ec80aca-00c4-4141-b96b-3ae8837fc751 :setup: standalone gssapi instance :steps: 1. Add GSSAPI to allowed sasl mechanisms. 2. Attempt to bind :expectedresults: 1. The allowed mechs are changed. 2. The bind succeeds. """ topology_st_gssapi.standalone.config.set('nsslapd-allowed-sasl-mechanisms', 'GSSAPI EXTERNAL ANONYMOUS') > conn = testuser.bind_gssapi() suites/gssapi/simple_gssapi_test.py:125: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/local/lib/python3.8/site-packages/lib389/idm/account.py:258: in bind_gssapi inst_clone.open(saslmethod='gssapi') /usr/local/lib/python3.8/site-packages/lib389/__init__.py:995: in open self.sasl_interactive_bind_s("", sasl_auth, escapehatch='i am sure') /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:476: in sasl_interactive_bind_s return self._ldap_call(self._l.sasl_interactive_bind_s,who,auth,RequestControlTuples(serverctrls),RequestControlTuples(clientctrls),sasl_flags) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:340: in _ldap_call reraise(exc_type, exc_value, exc_traceback) /usr/lib64/python3.8/site-packages/ldap/compat.py:46: in reraise raise exc_value _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa46021550> func = <built-in method sasl_interactive_bind_s of LDAP object at 0x7faa553d7180> args = ('', <ldap.sasl.gssapi object at 0x7faa553d7f70>, None, None, 2) kwargs = {}, diagnostic_message_success = None, exc_type = None exc_value = None, 
exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E ldap.INVALID_CREDENTIALS: {'result': 49, 'desc': 'Invalid credentials', 'ctrls': [], 'info': 'SASL(-1): generic failure: GSSAPI Error: An invalid name was supplied (Included profile file could not be read)'} /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:324: INVALID_CREDENTIALS | |||
Failed | suites/healthcheck/health_security_test.py::test_healthcheck_certif_expiring_within_30d | 13.68 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7faa45d6afd0> @pytest.mark.ds50873 @pytest.mark.bz1685160 @pytest.mark.xfail(ds_is_older("1.4.1"), reason="Not implemented") def test_healthcheck_certif_expiring_within_30d(topology_st): """Check if HealthCheck returns DSCERTLE0001 code :id: c2165032-88ba-4978-a4ca-2fecfd8c35d8 :setup: Standalone instance :steps: 1. Create DS instance 2. Use libfaketime to tell the process the date is within 30 days before certificate expiration 3. Use HealthCheck without --json option 4. Use HealthCheck with --json option :expectedresults: 1. Success 2. Success 3. Healthcheck reports DSCERTLE0001 code and related details 4. Healthcheck reports DSCERTLE0001 code and related details """ RET_CODE = 'DSCERTLE0001' standalone = topology_st.standalone standalone.enable_tls() # Cert is valid two years from today, so we count the date that is within 30 days before certificate expiration date_future = datetime.now() + timedelta(days=701) with libfaketime.fake_time(date_future): time.sleep(1) > run_healthcheck_and_flush_log(topology_st, standalone, RET_CODE, json=False) suites/healthcheck/health_security_test.py:304: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ topology = <lib389.topologies.TopologyMain object at 0x7faa45d6afd0> instance = <lib389.DirSrv object at 0x7faa45d40280> searched_code = 'DSCERTLE0001', json = False, searched_code2 = None def run_healthcheck_and_flush_log(topology, instance, searched_code, json, searched_code2=None): args = FakeArgs() args.instance = instance.serverid args.verbose = instance.verbose args.list_errors = False args.list_checks = False args.check = ['config', 'encryption', 'tls', 'fschecks'] args.dry_run = False if json: log.info('Use healthcheck with --json option') args.json = json health_check_run(instance, topology.logcap.log, args) assert topology.logcap.contains(searched_code) log.info('Healthcheck returned searched code: %s' % searched_code) if 
searched_code2 is not None: assert topology.logcap.contains(searched_code2) log.info('Healthcheck returned searched code: %s' % searched_code2) else: log.info('Use healthcheck without --json option') args.json = json health_check_run(instance, topology.logcap.log, args) > assert topology.logcap.contains(searched_code) E AssertionError: assert False E + where False = <bound method LogCapture.contains of <LogCapture (NOTSET)>>('DSCERTLE0001') E + where <bound method LogCapture.contains of <LogCapture (NOTSET)>> = <LogCapture (NOTSET)>.contains E + where <LogCapture (NOTSET)> = <lib389.topologies.TopologyMain object at 0x7faa45d6afd0>.logcap suites/healthcheck/health_security_test.py:67: AssertionError -------------------------------Captured log call-------------------------------- [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... [32mINFO [0m LogCapture:health.py:99 Checking config:hr_timestamp ... [32mINFO [0m LogCapture:health.py:99 Checking config:passwordscheme ... [32mINFO [0m LogCapture:health.py:99 Checking encryption:check_tls_version ... [32mINFO [0m LogCapture:health.py:99 Checking tls:certificate_expiration ... [32mINFO [0m LogCapture:health.py:99 Checking fschecks:file_perms ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:111 No issues found. | |||
Failed | suites/healthcheck/health_security_test.py::test_healthcheck_certif_expired | 14.44 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7faa45d6afd0> @pytest.mark.ds50873 @pytest.mark.bz1685160 @pytest.mark.xfail(ds_is_older("1.4.1"), reason="Not implemented") def test_healthcheck_certif_expired(topology_st): """Check if HealthCheck returns DSCERTLE0002 code :id: ceff2c22-62c0-4fd9-b737-930a88458d68 :setup: Standalone instance :steps: 1. Create DS instance 2. Use libfaketime to tell the process the date is after certificate expiration 3. Use HealthCheck without --json option 4. Use HealthCheck with --json option :expectedresults: 1. Success 2. Success 3. Healthcheck reports DSCERTLE0002 code and related details 4. Healthcheck reports DSCERTLE0002 code and related details """ RET_CODE = 'DSCERTLE0002' standalone = topology_st.standalone standalone.enable_tls() # Cert is valid two years from today, so we count the date that is after expiration date_future = datetime.now() + timedelta(days=731) with libfaketime.fake_time(date_future): time.sleep(1) > run_healthcheck_and_flush_log(topology_st, standalone, RET_CODE, json=False) suites/healthcheck/health_security_test.py:343: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ topology = <lib389.topologies.TopologyMain object at 0x7faa45d6afd0> instance = <lib389.DirSrv object at 0x7faa45d40280> searched_code = 'DSCERTLE0002', json = False, searched_code2 = None def run_healthcheck_and_flush_log(topology, instance, searched_code, json, searched_code2=None): args = FakeArgs() args.instance = instance.serverid args.verbose = instance.verbose args.list_errors = False args.list_checks = False args.check = ['config', 'encryption', 'tls', 'fschecks'] args.dry_run = False if json: log.info('Use healthcheck with --json option') args.json = json health_check_run(instance, topology.logcap.log, args) assert topology.logcap.contains(searched_code) log.info('Healthcheck returned searched code: %s' % searched_code) if searched_code2 is not None: assert 
topology.logcap.contains(searched_code2) log.info('Healthcheck returned searched code: %s' % searched_code2) else: log.info('Use healthcheck without --json option') args.json = json health_check_run(instance, topology.logcap.log, args) > assert topology.logcap.contains(searched_code) E AssertionError: assert False E + where False = <bound method LogCapture.contains of <LogCapture (NOTSET)>>('DSCERTLE0002') E + where <bound method LogCapture.contains of <LogCapture (NOTSET)>> = <LogCapture (NOTSET)>.contains E + where <LogCapture (NOTSET)> = <lib389.topologies.TopologyMain object at 0x7faa45d6afd0>.logcap suites/healthcheck/health_security_test.py:67: AssertionError -------------------------------Captured log call-------------------------------- [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... [32mINFO [0m LogCapture:health.py:99 Checking config:hr_timestamp ... [32mINFO [0m LogCapture:health.py:99 Checking config:passwordscheme ... [32mINFO [0m LogCapture:health.py:99 Checking encryption:check_tls_version ... [32mINFO [0m LogCapture:health.py:99 Checking tls:certificate_expiration ... [32mINFO [0m LogCapture:health.py:99 Checking fschecks:file_perms ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:119 2 Issues found! Generating report ... 
[32mINFO [0m LogCapture:health.py:45 [1] DS Lint Error: DSCERTLE0001 [32mINFO [0m LogCapture:health.py:46 -------------------------------------------------------------------------------- [32mINFO [0m LogCapture:health.py:47 Severity: MEDIUM [32mINFO [0m LogCapture:health.py:49 Check: tls:certificate_expiration [32mINFO [0m LogCapture:health.py:50 Affects: [32mINFO [0m LogCapture:health.py:52 -- Expiring Certificate [32mINFO [0m LogCapture:health.py:53 Details: [32mINFO [0m LogCapture:health.py:54 ----------- [32mINFO [0m LogCapture:health.py:55 The certificate (Self-Signed-CA) will expire in less than 30 days [32mINFO [0m LogCapture:health.py:56 Resolution: [32mINFO [0m LogCapture:health.py:57 ----------- [32mINFO [0m LogCapture:health.py:58 Renew the certificate before it expires to prevent disruptions with TLS connections. [32mINFO [0m LogCapture:health.py:45 [2] DS Lint Error: DSCERTLE0001 [32mINFO [0m LogCapture:health.py:46 -------------------------------------------------------------------------------- [32mINFO [0m LogCapture:health.py:47 Severity: MEDIUM [32mINFO [0m LogCapture:health.py:49 Check: tls:certificate_expiration [32mINFO [0m LogCapture:health.py:50 Affects: [32mINFO [0m LogCapture:health.py:52 -- Expiring Certificate [32mINFO [0m LogCapture:health.py:53 Details: [32mINFO [0m LogCapture:health.py:54 ----------- [32mINFO [0m LogCapture:health.py:55 The certificate (Server-Cert) will expire in less than 30 days [32mINFO [0m LogCapture:health.py:56 Resolution: [32mINFO [0m LogCapture:health.py:57 ----------- [32mINFO [0m LogCapture:health.py:58 Renew the certificate before it expires to prevent disruptions with TLS connections. [32mINFO [0m LogCapture:health.py:124 ===== End Of Report (2 Issues found) ===== | |||
Failed | suites/paged_results/paged_results_test.py::test_search_paged_limits[conf_attr_values1-PASS] | 6.11 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7faa5583ef40> create_user = <lib389.idm.user.UserAccount object at 0x7faa558336d0> conf_attr_values = ('5000', '120', '122'), expected_rs = 'PASS' @pytest.mark.parametrize('conf_attr_values,expected_rs', ((('5000', '100', '100'), ldap.ADMINLIMIT_EXCEEDED), (('5000', '120', '122'), 'PASS'))) def test_search_paged_limits(topology_st, create_user, conf_attr_values, expected_rs): """Verify that nsslapd-idlistscanlimit and nsslapd-lookthroughlimit can limit the administrator search abilities. :id: e0f8b916-7276-4bd3-9e73-8696a4468811 :parametrized: yes :setup: Standalone instance, test user for binding, 10 users for the search base :steps: 1. Set nsslapd-sizelimit and nsslapd-pagedsizelimit to 5000 2. Set nsslapd-idlistscanlimit: 120 3. Set nsslapd-lookthroughlimit: 122 4. Bind as test user 5. Search through added users with a simple paged control using page_size = 10 6. Bind as Directory Manager 7. Set nsslapd-idlistscanlimit: 100 8. Set nsslapd-lookthroughlimit: 100 9. Bind as test user 10. Search through added users with a simple paged control using page_size = 10 :expectedresults: 1. nsslapd-sizelimit and nsslapd-pagedsizelimit should be successfully set 2. nsslapd-idlistscanlimit should be successfully set 3. nsslapd-lookthroughlimit should be successfully set 4. Bind should be successful 5. No error happens, all users should be found 6. Bind should be successful 7. nsslapd-idlistscanlimit should be successfully set 8. nsslapd-lookthroughlimit should be successfully set 9. Bind should be successful 10. 
It should throw ADMINLIMIT_EXCEEDED exception """ users_num = 101 page_size = 10 users_list = add_users(topology_st, users_num, DEFAULT_SUFFIX) search_flt = r'(uid=test*)' searchreq_attrlist = ['dn', 'sn'] size_attr_bck = change_conf_attr(topology_st, DN_CONFIG, 'nsslapd-sizelimit', conf_attr_values[0]) pagedsize_attr_bck = change_conf_attr(topology_st, DN_CONFIG, 'nsslapd-pagedsizelimit', conf_attr_values[0]) idlistscan_attr_bck = change_conf_attr(topology_st, 'cn=config,%s' % DN_LDBM, 'nsslapd-idlistscanlimit', conf_attr_values[1]) lookthrough_attr_bck = change_conf_attr(topology_st, 'cn=config,%s' % DN_LDBM, 'nsslapd-lookthroughlimit', conf_attr_values[2]) try: log.info('Set user bind') conn = create_user.bind(TEST_USER_PWD) req_ctrl = SimplePagedResultsControl(True, size=page_size, cookie='') controls = [req_ctrl] if expected_rs == ldap.ADMINLIMIT_EXCEEDED: log.info('Expect to fail with ADMINLIMIT_EXCEEDED') with pytest.raises(expected_rs): all_results = paged_search(conn, DEFAULT_SUFFIX, controls, search_flt, searchreq_attrlist) elif expected_rs == 'PASS': log.info('Expect to pass') > all_results = paged_search(conn, DEFAULT_SUFFIX, controls, search_flt, searchreq_attrlist) suites/paged_results/paged_results_test.py:901: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ suites/paged_results/paged_results_test.py:200: in paged_search rtype, rdata, rmsgid, rctrls = conn.result3(msgid) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:764: in result3 resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4( /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:774: in result4 ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop) 
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:340: in _ldap_call reraise(exc_type, exc_value, exc_traceback) /usr/lib64/python3.8/site-packages/ldap/compat.py:46: in reraise raise exc_value _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa46362610> func = <built-in method result4 of LDAP object at 0x7faa55820e70> args = (12, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E ldap.ADMINLIMIT_EXCEEDED: {'msgtype': 100, 'msgid': 12, 'result': 11, 'desc': 'Administrative limit exceeded', 'ctrls': []} /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:324: ADMINLIMIT_EXCEEDED -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 101 users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-sizelimit to 5000. Previous value - b'2000'. Modified suffix - cn=config. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-pagedsizelimit to 5000. Previous value - b'0'. Modified suffix - cn=config. 
[32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-idlistscanlimit to 120. Previous value - b'4000'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-lookthroughlimit to 122. Previous value - b'5000'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:889 Set user bind [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:900 Expect to pass [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 10; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7faa45ad7700>]. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 1 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 2 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 3 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 4 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 5 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 6 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 7 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 8 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 9 [32mINFO [0m 
tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 101 users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-sizelimit to b'2000'. Previous value - b'5000'. Modified suffix - cn=config. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-pagedsizelimit to b'0'. Previous value - b'5000'. Modified suffix - cn=config. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-lookthroughlimit to b'5000'. Previous value - b'122'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-idlistscanlimit to b'4000'. Previous value - b'120'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. | |||
Failed | suites/paged_results/paged_results_test.py::test_search_paged_user_limits[conf_attr_values1-PASS] | 6.36 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7faa5583ef40> create_user = <lib389.idm.user.UserAccount object at 0x7faa558336d0> conf_attr_values = ('1000', '120', '122'), expected_rs = 'PASS' @pytest.mark.parametrize('conf_attr_values,expected_rs', ((('1000', '100', '100'), ldap.ADMINLIMIT_EXCEEDED), (('1000', '120', '122'), 'PASS'))) def test_search_paged_user_limits(topology_st, create_user, conf_attr_values, expected_rs): """Verify that nsPagedIDListScanLimit and nsPagedLookthroughLimit override nsslapd-idlistscanlimit and nsslapd-lookthroughlimit while performing search with the simple paged results control. :id: 69e393e9-1ab8-4f4e-b4a1-06ca63dc7b1b :parametrized: yes :setup: Standalone instance, test user for binding, 10 users for the search base :steps: 1. Set nsslapd-idlistscanlimit: 1000 2. Set nsslapd-lookthroughlimit: 1000 3. Set nsPagedIDListScanLimit: 120 4. Set nsPagedLookthroughLimit: 122 5. Bind as test user 6. Search through added users with a simple paged control using page_size = 10 7. Bind as Directory Manager 8. Set nsPagedIDListScanLimit: 100 9. Set nsPagedLookthroughLimit: 100 10. Bind as test user 11. Search through added users with a simple paged control using page_size = 10 :expectedresults: 1. nsslapd-idlistscanlimit should be successfully set 2. nsslapd-lookthroughlimit should be successfully set 3. nsPagedIDListScanLimit should be successfully set 4. nsPagedLookthroughLimit should be successfully set 5. Bind should be successful 6. No error happens, all users should be found 7. Bind should be successful 8. nsPagedIDListScanLimit should be successfully set 9. nsPagedLookthroughLimit should be successfully set 10. Bind should be successful 11. 
It should throw ADMINLIMIT_EXCEEDED exception """ users_num = 101 page_size = 10 users_list = add_users(topology_st, users_num, DEFAULT_SUFFIX) search_flt = r'(uid=test*)' searchreq_attrlist = ['dn', 'sn'] lookthrough_attr_bck = change_conf_attr(topology_st, 'cn=config,%s' % DN_LDBM, 'nsslapd-lookthroughlimit', conf_attr_values[0]) idlistscan_attr_bck = change_conf_attr(topology_st, 'cn=config,%s' % DN_LDBM, 'nsslapd-idlistscanlimit', conf_attr_values[0]) user_idlistscan_attr_bck = change_conf_attr(topology_st, create_user.dn, 'nsPagedIDListScanLimit', conf_attr_values[1]) user_lookthrough_attr_bck = change_conf_attr(topology_st, create_user.dn, 'nsPagedLookthroughLimit', conf_attr_values[2]) try: log.info('Set user bind') conn = create_user.bind(TEST_USER_PWD) req_ctrl = SimplePagedResultsControl(True, size=page_size, cookie='') controls = [req_ctrl] if expected_rs == ldap.ADMINLIMIT_EXCEEDED: log.info('Expect to fail with ADMINLIMIT_EXCEEDED') with pytest.raises(expected_rs): all_results = paged_search(conn, DEFAULT_SUFFIX, controls, search_flt, searchreq_attrlist) elif expected_rs == 'PASS': log.info('Expect to pass') > all_results = paged_search(conn, DEFAULT_SUFFIX, controls, search_flt, searchreq_attrlist) suites/paged_results/paged_results_test.py:975: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ suites/paged_results/paged_results_test.py:200: in paged_search rtype, rdata, rmsgid, rctrls = conn.result3(msgid) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:764: in result3 resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4( /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:774: in result4 ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop) 
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:340: in _ldap_call reraise(exc_type, exc_value, exc_traceback) /usr/lib64/python3.8/site-packages/ldap/compat.py:46: in reraise raise exc_value _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa45b48c70> func = <built-in method result4 of LDAP object at 0x7faa545536c0> args = (12, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E ldap.ADMINLIMIT_EXCEEDED: {'msgtype': 100, 'msgid': 12, 'result': 11, 'desc': 'Administrative limit exceeded', 'ctrls': []} /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:324: ADMINLIMIT_EXCEEDED -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 101 users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-lookthroughlimit to 1000. Previous value - b'5000'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-idlistscanlimit to 1000. Previous value - b'4000'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. 
[32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsPagedIDListScanLimit to 120. Previous value - None. Modified suffix - uid=simplepaged_test,ou=People,dc=example,dc=com. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsPagedLookthroughLimit to 122. Previous value - None. Modified suffix - uid=simplepaged_test,ou=People,dc=example,dc=com. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:963 Set user bind [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:974 Expect to pass [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 10; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7faa54e208b0>]. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 1 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 2 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 3 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 4 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 5 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 6 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 7 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 8 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 9 [32mINFO [0m 
tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 101 users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-lookthroughlimit to b'5000'. Previous value - b'1000'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-idlistscanlimit to b'4000'. Previous value - b'1000'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsPagedIDListScanLimit to None. Previous value - b'120'. Modified suffix - uid=simplepaged_test,ou=People,dc=example,dc=com. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsPagedLookthroughLimit to None. Previous value - b'122'. Modified suffix - uid=simplepaged_test,ou=People,dc=example,dc=com. | |||
Failed | suites/replication/conflict_resolve_test.py::TestTwoMasters::test_complex_add_modify_modrdn_delete | 90.38 | |
self = <tests.suites.replication.conflict_resolve_test.TestTwoMasters object at 0x7faa46576b50> topology_m2 = <lib389.topologies.TopologyMain object at 0x7faa5583d820> base_m2 = <lib389.idm.nscontainer.nsContainer object at 0x7faa46561100> def test_complex_add_modify_modrdn_delete(self, topology_m2, base_m2): """Check that conflict properly resolved for complex operations which involve add, modify, modrdn and delete :id: 77f09b18-03d1-45da-940b-1ad2c2908eb1 :setup: Two master replication, test container for entries, enable plugin logging, audit log, error log for replica and access log for internal :steps: 1. Add ten users to m1 and wait for replication to happen 2. Pause replication 3. Test add-del on m1 and add on m2 4. Test add-mod on m1 and add on m2 5. Test add-modrdn on m1 and add on m2 6. Test multiple add, modrdn 7. Test Add-del on both masters 8. Test modrdn-modrdn 9. Test modrdn-del 10. Resume replication 11. Check that the entries on both masters are the same and replication is working :expectedresults: 1. It should pass 2. It should pass 3. It should pass 4. It should pass 5. It should pass 6. It should pass 7. It should pass 8. It should pass 9. It should pass 10. It should pass 11. 
It should pass """ M1 = topology_m2.ms["master1"] M2 = topology_m2.ms["master2"] test_users_m1 = UserAccounts(M1, base_m2.dn, rdn=None) test_users_m2 = UserAccounts(M2, base_m2.dn, rdn=None) repl = ReplicationManager(SUFFIX) for user_num in range(1100, 1110): _create_user(test_users_m1, user_num) repl.test_replication(M1, M2) topology_m2.pause_all_replicas() log.info("Test add-del on M1 and add on M2") user_num += 1 _create_user(test_users_m1, user_num) _delete_user(test_users_m1, user_num, sleep=True) _create_user(test_users_m2, user_num, sleep=True) user_num += 1 _create_user(test_users_m1, user_num, sleep=True) _create_user(test_users_m2, user_num, sleep=True) _delete_user(test_users_m1, user_num, sleep=True) user_num += 1 _create_user(test_users_m2, user_num, sleep=True) _create_user(test_users_m1, user_num) _delete_user(test_users_m1, user_num) log.info("Test add-mod on M1 and add on M2") user_num += 1 _create_user(test_users_m1, user_num) _modify_user(test_users_m1, user_num, sleep=True) _create_user(test_users_m2, user_num, sleep=True) user_num += 1 _create_user(test_users_m1, user_num, sleep=True) _create_user(test_users_m2, user_num, sleep=True) _modify_user(test_users_m1, user_num, sleep=True) user_num += 1 _create_user(test_users_m2, user_num, sleep=True) _create_user(test_users_m1, user_num) _modify_user(test_users_m1, user_num) log.info("Test add-modrdn on M1 and add on M2") user_num += 1 _create_user(test_users_m1, user_num) _rename_user(test_users_m1, user_num, user_num+20, sleep=True) _create_user(test_users_m2, user_num, sleep=True) user_num += 1 _create_user(test_users_m1, user_num, sleep=True) _create_user(test_users_m2, user_num, sleep=True) _rename_user(test_users_m1, user_num, user_num+20, sleep=True) user_num += 1 _create_user(test_users_m2, user_num, sleep=True) _create_user(test_users_m1, user_num) _rename_user(test_users_m1, user_num, user_num+20) log.info("Test multiple add, modrdn") user_num += 1 _create_user(test_users_m1, user_num, 
sleep=True) _create_user(test_users_m2, user_num, sleep=True) _rename_user(test_users_m1, user_num, user_num+20) _create_user(test_users_m1, user_num, sleep=True) _modify_user(test_users_m2, user_num, sleep=True) log.info("Add - del on both masters") user_num += 1 _create_user(test_users_m1, user_num) _delete_user(test_users_m1, user_num, sleep=True) _create_user(test_users_m2, user_num) _delete_user(test_users_m2, user_num, sleep=True) log.info("Test modrdn - modrdn") user_num += 1 _rename_user(test_users_m1, 1109, 1129, sleep=True) _rename_user(test_users_m2, 1109, 1129, sleep=True) log.info("Test modrdn - del") user_num += 1 _rename_user(test_users_m1, 1100, 1120, sleep=True) _delete_user(test_users_m2, 1100) user_num += 1 _delete_user(test_users_m2, 1101, sleep=True) _rename_user(test_users_m1, 1101, 1121) topology_m2.resume_all_replicas() repl.test_replication_topology(topology_m2) time.sleep(30) user_dns_m1 = [user.dn for user in test_users_m1.list()] user_dns_m2 = [user.dn for user in test_users_m2.list()] > assert set(user_dns_m1) == set(user_dns_m2) E AssertionError: assert {'uid=test_us...,dc=com', ...} == {'uid=test_us...,dc=com', ...} E Extra items in the left set: E 'uid=test_user_1111,cn=test_container,dc=example,dc=com' E 'uid=test_user_1112,cn=test_container,dc=example,dc=com' E 'uid=test_user_1117,cn=test_container,dc=example,dc=com' E Full diff: E { E 'uid=test_user_1102,cn=test_container,dc=example,dc=com',... 
E E ...Full output truncated (24 lines hidden), use '-vv' to show suites/replication/conflict_resolve_test.py:369: AssertionError -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect a80ca7c3-1a28-4b32-9433-436ba3fd6b9e / got description=929d149a-1f68-4344-8700-5138d6662434) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect a80ca7c3-1a28-4b32-9433-436ba3fd6b9e / got description=929d149a-1f68-4344-8700-5138d6662434) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect a80ca7c3-1a28-4b32-9433-436ba3fd6b9e / got description=929d149a-1f68-4344-8700-5138d6662434) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect a80ca7c3-1a28-4b32-9433-436ba3fd6b9e / got description=929d149a-1f68-4344-8700-5138d6662434) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect a80ca7c3-1a28-4b32-9433-436ba3fd6b9e / got description=929d149a-1f68-4344-8700-5138d6662434) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m 
tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:285 Test add-del on M1 and add on M2 [32mINFO [0m tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:301 Test add-mod on M1 and add on M2 [32mINFO [0m tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:317 Test add-modrdn on M1 and add on M2 [32mINFO [0m tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:333 Test multiple add, modrdn [32mINFO [0m tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:341 Add - del on both masters [32mINFO [0m tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:348 Test modrdn - modrdn [32mINFO [0m tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:353 Test modrdn - del [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect b827189a-83ac-4e90-abec-c05bb58c73f2 / got description=a80ca7c3-1a28-4b32-9433-436ba3fd6b9e) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect b827189a-83ac-4e90-abec-c05bb58c73f2 / got description=a80ca7c3-1a28-4b32-9433-436ba3fd6b9e) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect b827189a-83ac-4e90-abec-c05bb58c73f2 / got description=a80ca7c3-1a28-4b32-9433-436ba3fd6b9e) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect b827189a-83ac-4e90-abec-c05bb58c73f2 / got 
description=a80ca7c3-1a28-4b32-9433-436ba3fd6b9e) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect b827189a-83ac-4e90-abec-c05bb58c73f2 / got description=a80ca7c3-1a28-4b32-9433-436ba3fd6b9e) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect b827189a-83ac-4e90-abec-c05bb58c73f2 / got description=a80ca7c3-1a28-4b32-9433-436ba3fd6b9e) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect b827189a-83ac-4e90-abec-c05bb58c73f2 / got description=a80ca7c3-1a28-4b32-9433-436ba3fd6b9e) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect b827189a-83ac-4e90-abec-c05bb58c73f2 / got description=a80ca7c3-1a28-4b32-9433-436ba3fd6b9e) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect feb3e3ec-0e00-45f6-bca0-065363228ed5 / got description=b827189a-83ac-4e90-abec-c05bb58c73f2) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to 
ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working | |||
Failed | suites/replication/regression_test.py::test_online_init_should_create_keepalive_entries | 1.11 | |
topo_m2 = <lib389.topologies.TopologyMain object at 0x7faa555a3460> def test_online_init_should_create_keepalive_entries(topo_m2): """Check that keep alive entries are created when initializinf a master from another one :id: d5940e71-d18a-4b71-aaf7-b9185361fffe :setup: Two masters replication setup :steps: 1. Generate ldif without replication data 2 Init both masters from that ldif 3 Check that keep alive entries does not exists 4 Perform on line init of master2 from master1 5 Check that keep alive entries exists :expectedresults: 1. No error while generating ldif 2. No error while importing the ldif file 3. No keepalive entrie should exists on any masters 4. No error while initializing master2 5. All keepalive entries should exist on every masters """ repl = ReplicationManager(DEFAULT_SUFFIX) m1 = topo_m2.ms["master1"] m2 = topo_m2.ms["master2"] # Step 1: Generate ldif without replication data m1.stop() m2.stop() ldif_file = '%s/norepl.ldif' % m1.get_ldif_dir() m1.db2ldif(bename=DEFAULT_BENAME, suffixes=[DEFAULT_SUFFIX], excludeSuffixes=None, repl_data=False, outputfile=ldif_file, encrypt=False) # Remove replication metadata that are still in the ldif > _remove_replication_data(ldif_file) suites/replication/regression_test.py:1023: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ ldif_file = '/var/lib/dirsrv/slapd-master1/ldif/norepl.ldif' def _remove_replication_data(ldif_file): """ Remove the replication data from ldif file: db2lif without -r includes some of the replica data like - nsUniqueId - keepalive entries This function filters the ldif fil to remove these data """ > with open(ldif_file) as f: E FileNotFoundError: [Errno 2] No such file or directory: '/var/lib/dirsrv/slapd-master1/ldif/norepl.ldif' suites/replication/regression_test.py:120: FileNotFoundError ------------------------------Captured stderr call------------------------------ [09/Nov/2020:23:00:57.276345249 -0500] - INFO - dse_check_file - The config 
/etc/dirsrv/slapd-master1/dse.ldif can not be accessed. Attempting restore ... (reason: 0) [09/Nov/2020:23:00:57.276418838 -0500] - INFO - dse_check_file - The backup /etc/dirsrv/slapd-master1/dse.ldif.bak can not be accessed. Check it exists and permissions. [09/Nov/2020:23:00:57.276423366 -0500] - ERR - slapd_bootstrap_config - No valid configurations can be accessed! You must restore /etc/dirsrv/slapd-master1/dse.ldif from backup! [09/Nov/2020:23:00:57.276426797 -0500] - EMERG - main - The configuration files in directory /etc/dirsrv/slapd-master1 could not be read or were not found. Please refer to the error log or output for more information. | |||
Failed | suites/replication/tls_client_auth_repl_test.py::test_extract_pemfiles | 23.19 | |
tls_client_auth = <lib389.topologies.TopologyMain object at 0x7faa56090c10> def test_extract_pemfiles(tls_client_auth): """Test TLS client authentication between two masters operates as expected with 'on' and 'off' options of nsslapd-extract-pemfiles :id: 922d16f8-662a-4915-a39e-0aecd7c8e6e1 :setup: Two master replication, enabled TLS client auth :steps: 1. Check that nsslapd-extract-pemfiles default value is right 2. Check that replication works with both 'on' and 'off' values :expectedresults: 1. Success 2. Replication works """ m1 = tls_client_auth.ms['master1'] m2 = tls_client_auth.ms['master2'] repl = ReplicationManager(DEFAULT_SUFFIX) if ds_is_older('1.3.7'): default_val = 'off' else: default_val = 'on' attr_val = m1.config.get_attr_val_utf8('nsslapd-extract-pemfiles') log.info("Check that nsslapd-extract-pemfiles is {}".format(default_val)) assert attr_val == default_val for extract_pemfiles in ('on', 'off'): log.info("Set nsslapd-extract-pemfiles = '{}' and check replication works)") m1.config.set('nsslapd-extract-pemfiles', extract_pemfiles) m2.config.set('nsslapd-extract-pemfiles', extract_pemfiles) > repl.test_replication_topology(tls_client_auth) suites/replication/tls_client_auth_repl_test.py:175: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/local/lib/python3.8/site-packages/lib389/replica.py:2531: in test_replication_topology self.test_replication(a, b, timeout) /usr/local/lib/python3.8/site-packages/lib389/replica.py:2517: in test_replication self.wait_for_replication(from_instance, to_instance, timeout) _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.replica.ReplicationManager object at 0x7faa5609d2b0> from_instance = <lib389.DirSrv object at 0x7faa560a4d90> to_instance = <lib389.DirSrv object at 0x7faa56090dc0>, timeout = 20 def wait_for_replication(self, from_instance, to_instance, timeout=20): """Wait for a replication event to occur from instance to instance. 
This shows some point of synchronisation has occured. :param from_instance: The instance whos state we we want to check from :type from_instance: lib389.DirSrv :param to_instance: The instance whos state we want to check matches from. :type to_instance: lib389.DirSrv :param timeout: Fail after timeout seconds. :type timeout: int """ # Touch something then wait_for_replication. from_groups = Groups(from_instance, basedn=self._suffix, rdn=None) to_groups = Groups(to_instance, basedn=self._suffix, rdn=None) from_group = from_groups.get('replication_managers') to_group = to_groups.get('replication_managers') change = str(uuid.uuid4()) from_group.replace('description', change) for i in range(0, timeout): desc = to_group.get_attr_val_utf8('description') if change == desc: self._log.info("SUCCESS: Replication from %s to %s is working" % (from_instance.ldapuri, to_instance.ldapuri)) return True self._log.info("Retry: Replication from %s to %s is NOT working (expect %s / got description=%s)" % (from_instance.ldapuri, to_instance.ldapuri, change, desc)) time.sleep(1) self._log.info("FAIL: Replication from %s to %s is NOT working (expect %s / got description=%s)" % (from_instance.ldapuri, to_instance.ldapuri, change, desc)) > raise Exception("Replication did not sync in time!") E Exception: Replication did not sync in time! 
/usr/local/lib/python3.8/site-packages/lib389/replica.py:2501: Exception -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.replication.tls_client_auth_repl_test:tls_client_auth_repl_test.py:168 Check that nsslapd-extract-pemfiles is on [32mINFO [0m tests.suites.replication.tls_client_auth_repl_test:tls_client_auth_repl_test.py:172 Set nsslapd-extract-pemfiles = '{}' and check replication works) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 is NOT working (expect 17148dbb-38d7-48ba-a4b8-01a67cabb175 / got description=b5ddd3a1-104c-4b10-b54b-c091dce613c7) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 is NOT working (expect 7f47ce1d-9782-4dcb-8dab-6a78660dabca / got description=17148dbb-38d7-48ba-a4b8-01a67cabb175) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 is working [32mINFO [0m tests.suites.replication.tls_client_auth_repl_test:tls_client_auth_repl_test.py:172 Set nsslapd-extract-pemfiles = '{}' and check replication works) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 is NOT working (expect 36e52bd5-c5af-43b6-9887-368ef4b7f5b1 / got description=7f47ce1d-9782-4dcb-8dab-6a78660dabca) [32mINFO [0m 
lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 is NOT working (expect a44c6262-8616-4f49-aed4-ea1d9bb10e7a / got description=36e52bd5-c5af-43b6-9887-368ef4b7f5b1) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 is NOT working (expect a44c6262-8616-4f49-aed4-ea1d9bb10e7a / got description=36e52bd5-c5af-43b6-9887-368ef4b7f5b1) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 is NOT working (expect a44c6262-8616-4f49-aed4-ea1d9bb10e7a / got description=36e52bd5-c5af-43b6-9887-368ef4b7f5b1) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 is NOT working (expect a44c6262-8616-4f49-aed4-ea1d9bb10e7a / got description=36e52bd5-c5af-43b6-9887-368ef4b7f5b1) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 is NOT working (expect a44c6262-8616-4f49-aed4-ea1d9bb10e7a / got description=36e52bd5-c5af-43b6-9887-368ef4b7f5b1) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 is NOT working (expect 
a44c6262-8616-4f49-aed4-ea1d9bb10e7a / got description=36e52bd5-c5af-43b6-9887-368ef4b7f5b1) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 is NOT working (expect a44c6262-8616-4f49-aed4-ea1d9bb10e7a / got description=36e52bd5-c5af-43b6-9887-368ef4b7f5b1) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 is NOT working (expect a44c6262-8616-4f49-aed4-ea1d9bb10e7a / got description=36e52bd5-c5af-43b6-9887-368ef4b7f5b1) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 is NOT working (expect a44c6262-8616-4f49-aed4-ea1d9bb10e7a / got description=36e52bd5-c5af-43b6-9887-368ef4b7f5b1) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 is NOT working (expect a44c6262-8616-4f49-aed4-ea1d9bb10e7a / got description=36e52bd5-c5af-43b6-9887-368ef4b7f5b1) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 is NOT working (expect a44c6262-8616-4f49-aed4-ea1d9bb10e7a / got description=36e52bd5-c5af-43b6-9887-368ef4b7f5b1) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 is NOT working (expect a44c6262-8616-4f49-aed4-ea1d9bb10e7a / got description=36e52bd5-c5af-43b6-9887-368ef4b7f5b1) [32mINFO [0m 
lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 is NOT working (expect a44c6262-8616-4f49-aed4-ea1d9bb10e7a / got description=36e52bd5-c5af-43b6-9887-368ef4b7f5b1) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 is NOT working (expect a44c6262-8616-4f49-aed4-ea1d9bb10e7a / got description=36e52bd5-c5af-43b6-9887-368ef4b7f5b1) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 is NOT working (expect a44c6262-8616-4f49-aed4-ea1d9bb10e7a / got description=36e52bd5-c5af-43b6-9887-368ef4b7f5b1) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 is NOT working (expect a44c6262-8616-4f49-aed4-ea1d9bb10e7a / got description=36e52bd5-c5af-43b6-9887-368ef4b7f5b1) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 is NOT working (expect a44c6262-8616-4f49-aed4-ea1d9bb10e7a / got description=36e52bd5-c5af-43b6-9887-368ef4b7f5b1) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 is NOT working (expect a44c6262-8616-4f49-aed4-ea1d9bb10e7a / got description=36e52bd5-c5af-43b6-9887-368ef4b7f5b1) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 to 
ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 is NOT working (expect a44c6262-8616-4f49-aed4-ea1d9bb10e7a / got description=36e52bd5-c5af-43b6-9887-368ef4b7f5b1) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 is NOT working (expect a44c6262-8616-4f49-aed4-ea1d9bb10e7a / got description=36e52bd5-c5af-43b6-9887-368ef4b7f5b1) [32mINFO [0m lib389.replica:replica.py:2500 FAIL: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 is NOT working (expect a44c6262-8616-4f49-aed4-ea1d9bb10e7a / got description=36e52bd5-c5af-43b6-9887-368ef4b7f5b1) | |||
Failed | suites/schema/schema_reload_test.py::test_schema_operation | 2.18 | |
topo = <lib389.topologies.TopologyMain object at 0x7faa463b8ee0> def test_schema_operation(topo): """Test that the cases in original schema are preserved. Test that duplicated schema except cases are not loaded Test to use a custom schema :id: e7448863-ac62-4b49-b013-4efa412c0455 :setup: Standalone instance :steps: 1. Create a test schema with cases 2. Run a schema_reload task 3. Check the attribute is present 4. Case 2: Check duplicated schema except cases are not loaded 5. Case 2-1: Use the custom schema :expectedresults: 1. Operation should be successful 2. Operation should be successful 3. Operation should be successful 4. Operation should be successful 5. Operation should be successful """ log.info('case 1: Test the cases in the original schema are preserved.') schema_filename = topo.standalone.schemadir + '/98test.ldif' try: with open(schema_filename, "w") as schema_file: schema_file.write("dn: cn=schema\n") schema_file.write("attributetypes: ( 8.9.10.11.12.13.14 NAME " + "'MoZiLLaaTTRiBuTe' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 " + " X-ORIGIN 'Mozilla Dummy Schema' )\n") schema_file.write("objectclasses: ( 1.2.3.4.5.6.7 NAME 'MozillaObject' " + "SUP top MUST ( objectclass $ cn ) MAY ( MoZiLLaaTTRiBuTe )" + " X-ORIGIN 'user defined' )')\n") except OSError as e: log.fatal("Failed to create schema file: " + "{} Error: {}".format(schema_filename, str(e))) # run the schema reload task with the default schemadir schema = Schema(topo.standalone) task = schema.reload(schema_dir=topo.standalone.schemadir) task.wait() subschema = topo.standalone.schema.get_subschema() at_obj = subschema.get_obj(ldap.schema.AttributeType, 'MoZiLLaaTTRiBuTe') > assert at_obj is not None, "The attribute was not found on server" E AssertionError: The attribute was not found on server E assert None is not None suites/schema/schema_reload_test.py:120: AssertionError -------------------------------Captured log call-------------------------------- [32mINFO [0m 
tests.suites.schema.schema_reload_test:schema_reload_test.py:94 case 1: Test the cases in the original schema are preserved. | |||
Failed | suites/schema/schema_reload_test.py::test_valid_schema | 2.02 | |
topo = <lib389.topologies.TopologyMain object at 0x7faa463b8ee0> def test_valid_schema(topo): """Test schema-reload task with valid schema :id: 2ab304c0-3e58-4d34-b23b-a14b5997c7a8 :setup: Standalone instance :steps: 1. Create schema file with valid schema 2. Run schema-reload.pl script 3. Run ldapsearch and check if schema was added :expectedresults: 1. File creation should work 2. The schema reload task should be successful 3. Searching the server should return the new schema """ log.info("Test schema-reload task with valid schema") # Step 1 - Create schema file log.info("Create valid schema file (99user.ldif)...") schema_filename = (topo.standalone.schemadir + "/99user.ldif") try: with open(schema_filename, 'w') as schema_file: schema_file.write("dn: cn=schema\n") schema_file.write("attributetypes: ( 8.9.10.11.12.13.13 NAME " + "'ValidAttribute' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15" + " X-ORIGIN 'Mozilla Dummy Schema' )\n") schema_file.write("objectclasses: ( 1.2.3.4.5.6.7.8 NAME 'TestObject' " + "SUP top MUST ( objectclass $ cn ) MAY ( givenName $ " + "sn $ ValidAttribute ) X-ORIGIN 'user defined' )')\n") except OSError as e: log.fatal("Failed to create schema file: " + "{} Error: {}".format(schema_filename, str(e))) # Step 2 - Run the schema-reload task log.info("Run the schema-reload task...") schema = Schema(topo.standalone) task = schema.reload(schema_dir=topo.standalone.schemadir) task.wait() > assert task.get_exit_code() == 0, "The schema reload task failed" E AssertionError: The schema reload task failed E assert 65 == 0 E +65 E -0 suites/schema/schema_reload_test.py:207: AssertionError -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.schema.schema_reload_test:schema_reload_test.py:184 Test schema-reload task with valid schema [32mINFO [0m tests.suites.schema.schema_reload_test:schema_reload_test.py:187 Create valid schema file (99user.ldif)... 
[32mINFO [0m tests.suites.schema.schema_reload_test:schema_reload_test.py:203 Run the schema-reload task... | |||
Failed | suites/syncrepl_plugin/basic_test.py::test_sync_repl_cookie | 0.00 | |
topology = <lib389.topologies.TopologyMain object at 0x7faa4645c0d0> request = <FixtureRequest for <Function test_sync_repl_cookie>> def test_sync_repl_cookie(topology, request): """Test sync_repl cookie are progressing is an increasing order when there are nested updates :id: d7fbde25-5702-46ac-b38e-169d7a68e97c :setup: Standalone Instance :steps: 1.: enable retroCL 2.: configure retroCL to log nsuniqueid as targetUniqueId 3.: enable content_sync plugin 4.: enable automember 5.: create (2) groups. Few groups can help to reproduce the concurrent updates problem. 6.: configure automember to provision those groups with 'member' 7.: enable and configure memberof plugin 8.: enable plugin log level 9.: restart the server 10.: create a thread dedicated to run a sync repl client 11.: Create (9) users that will generate nested updates (automember/memberof) 12.: stop sync repl client and collect the list of cookie.change_no 13.: check that cookies.change_no are in increasing order :expectedresults: 1.: succeeds 2.: succeeds 3.: succeeds 4.: succeeds 5.: succeeds 6.: succeeds 7.: succeeds 8.: succeeds 9.: succeeds 10.: succeeds 11.: succeeds 12.: succeeds 13.: succeeds """ inst = topology[0] # Enable/configure retroCL plugin = RetroChangelogPlugin(inst) > plugin.disable() suites/syncrepl_plugin/basic_test.py:275: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/local/lib/python3.8/site-packages/lib389/plugins.py:63: in disable self.set('nsslapd-pluginEnabled', 'off') _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.plugins.RetroChangelogPlugin object at 0x7faa560a61f0> key = 'nsslapd-pluginEnabled', value = 'off', action = 2 def set(self, key, value, action=ldap.MOD_REPLACE): """Perform a specified action on a key with value :param key: an attribute name :type key: str :param value: an attribute value :type value: str :param action: - ldap.MOD_REPLACE - by default - ldap.MOD_ADD - 
ldap.MOD_DELETE :type action: int :returns: result of modify_s operation :raises: ValueError - if instance is not online """ if action == ldap.MOD_ADD: action_txt = "ADD" elif action == ldap.MOD_REPLACE: action_txt = "REPLACE" elif action == ldap.MOD_DELETE: action_txt = "DELETE" else: # This should never happen (bug!) action_txt = "UNKNOWN" if value is None or len(value) < 512: self._log.debug("%s set %s: (%r, %r)" % (self._dn, action_txt, key, display_log_value(key, value))) else: self._log.debug("%s set %s: (%r, value too large)" % (self._dn, action_txt, key)) if self._instance.state != DIRSRV_STATE_ONLINE: > raise ValueError("Invalid state. Cannot set properties on instance that is not ONLINE.") E ValueError: Invalid state. Cannot set properties on instance that is not ONLINE. /usr/local/lib/python3.8/site-packages/lib389/_mapped_object.py:438: ValueError | |||
Failed | suites/syncrepl_plugin/basic_test.py::test_sync_repl_cookie_add_del | 0.00 | |
topology = <lib389.topologies.TopologyMain object at 0x7faa4645c0d0> request = <FixtureRequest for <Function test_sync_repl_cookie_add_del>> def test_sync_repl_cookie_add_del(topology, request): """Test sync_repl cookie are progressing is an increasing order when there add and del :id: 83e11038-6ed0-4a5b-ac77-e44887ab11e3 :setup: Standalone Instance :steps: 1.: enable retroCL 2.: configure retroCL to log nsuniqueid as targetUniqueId 3.: enable content_sync plugin 4.: enable automember 5.: create (2) groups. Few groups can help to reproduce the concurrent updates problem. 6.: configure automember to provision those groups with 'member' 7.: enable and configure memberof plugin 8.: enable plugin log level 9.: restart the server 10.: create a thread dedicated to run a sync repl client 11.: Create (3) users that will generate nested updates (automember/memberof) 12.: Delete (3) users 13.: stop sync repl client and collect the list of cookie.change_no 14.: check that cookies.change_no are in increasing order :expectedresults: 1.: succeeds 2.: succeeds 3.: succeeds 4.: succeeds 5.: succeeds 6.: succeeds 7.: succeeds 8.: succeeds 9.: succeeds 10.: succeeds 11.: succeeds 12.: succeeds 13.: succeeds 14.: succeeds """ inst = topology[0] # Enable/configure retroCL plugin = RetroChangelogPlugin(inst) > plugin.disable() suites/syncrepl_plugin/basic_test.py:407: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/local/lib/python3.8/site-packages/lib389/plugins.py:63: in disable self.set('nsslapd-pluginEnabled', 'off') _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.plugins.RetroChangelogPlugin object at 0x7faa555a5070> key = 'nsslapd-pluginEnabled', value = 'off', action = 2 def set(self, key, value, action=ldap.MOD_REPLACE): """Perform a specified action on a key with value :param key: an attribute name :type key: str :param value: an attribute value :type value: str :param action: - 
ldap.MOD_REPLACE - by default - ldap.MOD_ADD - ldap.MOD_DELETE :type action: int :returns: result of modify_s operation :raises: ValueError - if instance is not online """ if action == ldap.MOD_ADD: action_txt = "ADD" elif action == ldap.MOD_REPLACE: action_txt = "REPLACE" elif action == ldap.MOD_DELETE: action_txt = "DELETE" else: # This should never happen (bug!) action_txt = "UNKNOWN" if value is None or len(value) < 512: self._log.debug("%s set %s: (%r, %r)" % (self._dn, action_txt, key, display_log_value(key, value))) else: self._log.debug("%s set %s: (%r, value too large)" % (self._dn, action_txt, key)) if self._instance.state != DIRSRV_STATE_ONLINE: > raise ValueError("Invalid state. Cannot set properties on instance that is not ONLINE.") E ValueError: Invalid state. Cannot set properties on instance that is not ONLINE. /usr/local/lib/python3.8/site-packages/lib389/_mapped_object.py:438: ValueError | |||
Failed | suites/syncrepl_plugin/basic_test.py::test_sync_repl_cookie_with_failure | 0.00 | |
topology = <lib389.topologies.TopologyMain object at 0x7faa4645c0d0> request = <FixtureRequest for <Function test_sync_repl_cookie_with_failure>> def test_sync_repl_cookie_with_failure(topology, request): """Test sync_repl cookie are progressing is the right order when there is a failure in nested updates :id: e0103448-170e-4080-8f22-c34606447ce2 :setup: Standalone Instance :steps: 1.: enable retroCL 2.: configure retroCL to log nsuniqueid as targetUniqueId 3.: enable content_sync plugin 4.: enable automember 5.: create (4) groups. make group2 groupOfUniqueNames so the automember will fail to add 'member' (uniqueMember expected) 6.: configure automember to provision those groups with 'member' 7.: enable and configure memberof plugin 8.: enable plugin log level 9.: restart the server 10.: create a thread dedicated to run a sync repl client 11.: Create a group that will be the only update received by sync repl client 12.: Create (9) users that will generate nested updates (automember/memberof) 13.: stop sync repl client and collect the list of cookie.change_no 14.: check that the list of cookie.change_no contains only the group 'step 11' :expectedresults: 1.: succeeds 2.: succeeds 3.: succeeds 4.: succeeds 5.: succeeds 6.: succeeds 7.: succeeds 8.: succeeds 9.: succeeds 10.: succeeds 11.: succeeds 12.: Fails (expected) 13.: succeeds 14.: succeeds """ inst = topology[0] # Enable/configure retroCL plugin = RetroChangelogPlugin(inst) > plugin.disable() suites/syncrepl_plugin/basic_test.py:539: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/local/lib/python3.8/site-packages/lib389/plugins.py:63: in disable self.set('nsslapd-pluginEnabled', 'off') _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.plugins.RetroChangelogPlugin object at 0x7faa560aa100> key = 'nsslapd-pluginEnabled', value = 'off', action = 2 def set(self, key, value, action=ldap.MOD_REPLACE): """Perform a specified action on 
a key with value :param key: an attribute name :type key: str :param value: an attribute value :type value: str :param action: - ldap.MOD_REPLACE - by default - ldap.MOD_ADD - ldap.MOD_DELETE :type action: int :returns: result of modify_s operation :raises: ValueError - if instance is not online """ if action == ldap.MOD_ADD: action_txt = "ADD" elif action == ldap.MOD_REPLACE: action_txt = "REPLACE" elif action == ldap.MOD_DELETE: action_txt = "DELETE" else: # This should never happen (bug!) action_txt = "UNKNOWN" if value is None or len(value) < 512: self._log.debug("%s set %s: (%r, %r)" % (self._dn, action_txt, key, display_log_value(key, value))) else: self._log.debug("%s set %s: (%r, value too large)" % (self._dn, action_txt, key)) if self._instance.state != DIRSRV_STATE_ONLINE: > raise ValueError("Invalid state. Cannot set properties on instance that is not ONLINE.") E ValueError: Invalid state. Cannot set properties on instance that is not ONLINE. /usr/local/lib/python3.8/site-packages/lib389/_mapped_object.py:438: ValueError | |||
Failed | suites/vlv/regression_test.py::test_bulk_import_when_the_backend_with_vlv_was_recreated | 0.57 | |
self = <lib389.mappingTree.MappingTreeLegacy object at 0x7faa45f8cfa0> suffix = 'dc=example,dc=com', bename = 'userRoot', parent = None def create(self, suffix=None, bename=None, parent=None): ''' Create a mapping tree entry (under "cn=mapping tree,cn=config"), for the 'suffix' and that is stored in 'bename' backend. 'bename' backend must exist before creating the mapping tree entry. If a 'parent' is provided that means that we are creating a sub-suffix mapping tree. @param suffix - suffix mapped by this mapping tree entry. It will be the common name ('cn') of the entry @param benamebase - backend common name (e.g. 'userRoot') @param parent - if provided is a parent suffix of 'suffix' @return DN of the mapping tree entry @raise ldap.NO_SUCH_OBJECT - if the backend entry or parent mapping tree does not exist ValueError - if missing a parameter, ''' # Check suffix is provided if not suffix: raise ValueError("suffix is mandatory") else: nsuffix = normalizeDN(suffix) # Check backend name is provided if not bename: raise ValueError("backend name is mandatory") # Check that if the parent suffix is provided then # it exists a mapping tree for it if parent: nparent = normalizeDN(parent) filt = suffixfilt(parent) try: entry = self.conn.getEntry(DN_MAPPING_TREE, ldap.SCOPE_SUBTREE, filt) pass except NoSuchEntryError: raise ValueError("parent suffix has no mapping tree") else: nparent = "" # Check if suffix exists, return filt = suffixfilt(suffix) try: entry = self.conn.getEntry(DN_MAPPING_TREE, ldap.SCOPE_SUBTREE, filt) return entry except ldap.NO_SUCH_OBJECT: entry = None # # Now start the real work # # fix me when we can actually used escaped DNs dn = ','.join(('cn="%s"' % nsuffix, DN_MAPPING_TREE)) entry = Entry(dn) entry.update({ 'objectclass': ['top', 'extensibleObject', MT_OBJECTCLASS_VALUE], 'nsslapd-state': 'backend', # the value in the dn has to be DN escaped # internal code will add the quoted value - unquoted value is # useful for searching. 
MT_PROPNAME_TO_ATTRNAME[MT_SUFFIX]: nsuffix, MT_PROPNAME_TO_ATTRNAME[MT_BACKEND]: bename }) # possibly add the parent if parent: entry.setValues(MT_PROPNAME_TO_ATTRNAME[MT_PARENT_SUFFIX], nparent) try: self.log.debug("Creating entry: %s", entry.dn) self.log.info("Entry %r", entry) > self.conn.add_s(entry) /usr/local/lib/python3.8/site-packages/lib389/mappingTree.py:155: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = (dn: cn="dc=example,dc=com",cn=mapping tree,cn=config cn: dc=example,dc=com nsslapd-backend: userRoot nsslapd-state: backend objectclass: top objectclass: extensibleObject objectclass: nsMappingTree ,) kwargs = {} c_stack = [FrameInfo(frame=<frame at 0x7faa5aaf5040, file '/usr/local/lib/python3.8/site-packages/lib389/__init__.py', line 176,...mbda>', code_context=[' self._inner_hookexec = lambda hook, methods, kwargs: hook.multicall(\n'], index=0), ...] frame = FrameInfo(frame=<frame at 0x55ebeb544620, file '/usr/local/lib/python3.8/site-packages/lib389/mappingTree.py', line 15.../lib389/mappingTree.py', lineno=155, function='create', code_context=[' self.conn.add_s(entry)\n'], index=0) ent = dn: cn="dc=example,dc=com",cn=mapping tree,cn=config cn: dc=example,dc=com nsslapd-backend: userRoot nsslapd-state: backend objectclass: top objectclass: extensibleObject objectclass: nsMappingTree def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. 
elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): > return f(ent.dn, ent.toTupleList(), *args[2:]) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:176: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa45f9c280> dn = 'cn="dc=example,dc=com",cn=mapping tree,cn=config' modlist = [('objectclass', [b'top', b'extensibleObject', b'nsMappingTree']), ('nsslapd-state', [b'backend']), ('cn', [b'dc=example,dc=com']), ('nsslapd-backend', [b'userRoot'])] def add_s(self,dn,modlist): > return self.add_ext_s(dn,modlist,None,None) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:439: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = ('cn="dc=example,dc=com",cn=mapping tree,cn=config', [('objectclass', [b'top', b'extensibleObject', b'nsMappingTree']), ('nsslapd-state', [b'backend']), ('cn', [b'dc=example,dc=com']), ('nsslapd-backend', [b'userRoot'])], None, None) kwargs = {}, ent = 'cn="dc=example,dc=com",cn=mapping tree,cn=config' def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 
'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: > return f(*args, **kwargs) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:178: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa45f9c280> dn = 'cn="dc=example,dc=com",cn=mapping tree,cn=config' modlist = [('objectclass', [b'top', b'extensibleObject', b'nsMappingTree']), ('nsslapd-state', [b'backend']), ('cn', [b'dc=example,dc=com']), ('nsslapd-backend', [b'userRoot'])] serverctrls = None, clientctrls = None def add_ext_s(self,dn,modlist,serverctrls=None,clientctrls=None): msgid = 
self.add_ext(dn,modlist,serverctrls,clientctrls) > resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:425: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = (76,), kwargs = {'all': 1, 'timeout': -1} def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: _ _ 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa45f9c280>, msgid = 76, all = 1 timeout = -1, resp_ctrl_classes = None def result3(self,msgid=ldap.RES_ANY,all=1,timeout=None,resp_ctrl_classes=None): > resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4( msgid,all,timeout, add_ctrls=0,add_intermediates=0,add_extop=0, resp_ctrl_classes=resp_ctrl_classes ) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:764: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = (76, 1, -1) kwargs = {'add_ctrls': 0, 'add_extop': 0, 'add_intermediates': 0, 'resp_ctrl_classes': None} def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. 
elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa45f9c280>, msgid = 76, all = 1 timeout = -1, add_ctrls = 0, add_intermediates = 0, add_extop = 0 resp_ctrl_classes = None def result4(self,msgid=ldap.RES_ANY,all=1,timeout=None,add_ctrls=0,add_intermediates=0,add_extop=0,resp_ctrl_classes=None): if timeout is None: timeout = self.timeout > ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:774: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = (<built-in method result4 of LDAP object at 0x7faa45fe83c0>, 76, 1, -1, 0, 0, ...) 
kwargs = {} def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa45f9c280> func = <built-in method result4 of LDAP object at 0x7faa45fe83c0> args = (76, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = 
None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: result = func(*args,**kwargs) if __debug__ and self._trace_level>=2: if func.__name__!="unbind_ext": diagnostic_message_success = self._l.get_option(ldap.OPT_DIAGNOSTIC_MESSAGE) finally: self._ldap_object_lock.release() except LDAPError as e: exc_type,exc_value,exc_traceback = sys.exc_info() try: if 'info' not in e.args[0] and 'errno' in e.args[0]: e.args[0]['info'] = strerror(e.args[0]['errno']) except IndexError: pass if __debug__ and self._trace_level>=2: self._trace_file.write('=> LDAPError - %s: %s\n' % (e.__class__.__name__,str(e))) try: > reraise(exc_type, exc_value, exc_traceback) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:340: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ exc_type = <class 'ldap.UNWILLING_TO_PERFORM'> exc_value = UNWILLING_TO_PERFORM({'msgtype': 105, 'msgid': 76, 'result': 53, 'desc': 'Server is unwilling to perform', 'ctrls': []}) exc_traceback = <traceback object at 0x7faa54be6680> def reraise(exc_type, exc_value, exc_traceback): """Re-raise an exception given information from sys.exc_info() Note that unlike six.reraise, this does not support replacing the traceback. All arguments must come from a single sys.exc_info() call. """ # In Python 3, all exception info is contained in one object. 
> raise exc_value /usr/lib64/python3.8/site-packages/ldap/compat.py:46: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa45f9c280> func = <built-in method result4 of LDAP object at 0x7faa45fe83c0> args = (76, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E ldap.UNWILLING_TO_PERFORM: {'msgtype': 105, 'msgid': 76, 'result': 53, 'desc': 'Server is unwilling to perform', 'ctrls': []} /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:324: UNWILLING_TO_PERFORM During handling of the above exception, another exception occurred: topology_m2 = <lib389.topologies.TopologyMain object at 0x7faa560a4f70> @pytest.mark.DS47966 def test_bulk_import_when_the_backend_with_vlv_was_recreated(topology_m2): """ Testing bulk import when the backend with VLV was recreated. If the test passes without the server crash, 47966 is verified. :id: 512963fa-fe02-11e8-b1d3-8c16451d917b :setup: Replication with two masters. :steps: 1. Generate vlvSearch entry 2. Generate vlvIndex entry 3. Delete the backend instance on Master 2 4. Delete the agreement, replica, and mapping tree, too. 5. Recreate the backend and the VLV index on Master 2. 6. Recreating vlvSrchDn and vlvIndexDn on Master 2. :expectedresults: 1. Should Success. 2. Should Success. 3. Should Success. 4. Should Success. 5. Should Success. 6. Should Success. 
""" M1 = topology_m2.ms["master1"] M2 = topology_m2.ms["master2"] # generate vlvSearch entry properties_for_search = { "objectclass": ["top", "vlvSearch"], "cn": "vlvSrch", "vlvbase": DEFAULT_SUFFIX, "vlvfilter": "(|(objectclass=*)(objectclass=ldapsubentry))", "vlvscope": "2", } vlv_searches = VLVSearch(M2) userroot_vlvsearch = vlv_searches.create( basedn="cn=userRoot,cn=ldbm database,cn=plugins,cn=config", properties=properties_for_search, ) assert "cn=vlvSrch,cn=userRoot,cn=ldbm database,cn=plugins,cn=config" in M2.getEntry( "cn=vlvSrch,cn=userRoot,cn=ldbm database,cn=plugins,cn=config").dn # generate vlvIndex entry properties_for_index = { "objectclass": ["top", "vlvIndex"], "cn": "vlvIdx", "vlvsort": "cn ou sn", } vlv_index = VLVIndex(M2) userroot_index = vlv_index.create( basedn="cn=vlvSrch,cn=userRoot,cn=ldbm database,cn=plugins,cn=config", properties=properties_for_index, ) assert "cn=vlvIdx,cn=vlvSrch,cn=userRoot,cn=ldbm database,cn=plugins,cn=config" in M2.getEntry( "cn=vlvIdx,cn=vlvSrch,cn=userRoot,cn=ldbm database,cn=plugins,cn=config").dn # Delete the backend instance on Master 2." userroot_index.delete() userroot_vlvsearch.delete_all() # delete the agreement, replica, and mapping tree, too. repl = ReplicationManager(DEFAULT_SUFFIX) repl.remove_master(M2) MappingTrees(M2).list()[0].delete() Backends(M2).list()[0].delete() # Recreate the backend and the VLV index on Master 2. > M2.mappingtree.create(DEFAULT_SUFFIX, "userRoot") suites/vlv/regression_test.py:87: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.mappingTree.MappingTreeLegacy object at 0x7faa45f8cfa0> suffix = 'dc=example,dc=com', bename = 'userRoot', parent = None def create(self, suffix=None, bename=None, parent=None): ''' Create a mapping tree entry (under "cn=mapping tree,cn=config"), for the 'suffix' and that is stored in 'bename' backend. 'bename' backend must exist before creating the mapping tree entry. 
If a 'parent' is provided that means that we are creating a sub-suffix mapping tree. @param suffix - suffix mapped by this mapping tree entry. It will be the common name ('cn') of the entry @param benamebase - backend common name (e.g. 'userRoot') @param parent - if provided is a parent suffix of 'suffix' @return DN of the mapping tree entry @raise ldap.NO_SUCH_OBJECT - if the backend entry or parent mapping tree does not exist ValueError - if missing a parameter, ''' # Check suffix is provided if not suffix: raise ValueError("suffix is mandatory") else: nsuffix = normalizeDN(suffix) # Check backend name is provided if not bename: raise ValueError("backend name is mandatory") # Check that if the parent suffix is provided then # it exists a mapping tree for it if parent: nparent = normalizeDN(parent) filt = suffixfilt(parent) try: entry = self.conn.getEntry(DN_MAPPING_TREE, ldap.SCOPE_SUBTREE, filt) pass except NoSuchEntryError: raise ValueError("parent suffix has no mapping tree") else: nparent = "" # Check if suffix exists, return filt = suffixfilt(suffix) try: entry = self.conn.getEntry(DN_MAPPING_TREE, ldap.SCOPE_SUBTREE, filt) return entry except ldap.NO_SUCH_OBJECT: entry = None # # Now start the real work # # fix me when we can actually used escaped DNs dn = ','.join(('cn="%s"' % nsuffix, DN_MAPPING_TREE)) entry = Entry(dn) entry.update({ 'objectclass': ['top', 'extensibleObject', MT_OBJECTCLASS_VALUE], 'nsslapd-state': 'backend', # the value in the dn has to be DN escaped # internal code will add the quoted value - unquoted value is # useful for searching. 
MT_PROPNAME_TO_ATTRNAME[MT_SUFFIX]: nsuffix, MT_PROPNAME_TO_ATTRNAME[MT_BACKEND]: bename }) # possibly add the parent if parent: entry.setValues(MT_PROPNAME_TO_ATTRNAME[MT_PARENT_SUFFIX], nparent) try: self.log.debug("Creating entry: %s", entry.dn) self.log.info("Entry %r", entry) self.conn.add_s(entry) except ldap.LDAPError as e: > raise ldap.LDAPError("Error adding suffix entry " + dn, e) E ldap.LDAPError: ('Error adding suffix entry cn="dc=example,dc=com",cn=mapping tree,cn=config', UNWILLING_TO_PERFORM({'msgtype': 105, 'msgid': 76, 'result': 53, 'desc': 'Server is unwilling to perform', 'ctrls': []})) /usr/local/lib/python3.8/site-packages/lib389/mappingTree.py:157: LDAPError -------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c35a29d8-4e07-464c-bf6f-5bf7b48292ab / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 20c27487-12e4-40a0-b55d-21a717dff4d3 / got description=c35a29d8-4e07-464c-bf6f-5bf7b48292ab) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... 
[32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists ------------------------------Captured stdout call------------------------------ deleting vlv search: cn=vlvSrch,cn=userRoot,cn=ldbm database,cn=plugins,cn=config deleting vlv search entry... -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:mappingTree.py:154 Entry dn: cn="dc=example,dc=com",cn=mapping tree,cn=config cn: dc=example,dc=com nsslapd-backend: userRoot nsslapd-state: backend objectclass: top objectclass: extensibleObject objectclass: nsMappingTree | |||
Failed | tickets/ticket47781_test.py::test_ticket47781 | 4.10 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7faa45f975e0> def test_ticket47781(topology_st): """ Testing for a deadlock after doing an online import of an LDIF with replication data. The replication agreement should be invalid. """ log.info('Testing Ticket 47781 - Testing for deadlock after importing LDIF with replication data') master = topology_st.standalone repl = ReplicationManager(DEFAULT_SUFFIX) repl.create_first_master(master) properties = {RA_NAME: r'meTo_$host:$port', RA_BINDDN: defaultProperties[REPLICATION_BIND_DN], RA_BINDPW: defaultProperties[REPLICATION_BIND_PW], RA_METHOD: defaultProperties[REPLICATION_BIND_METHOD], RA_TRANSPORT_PROT: defaultProperties[REPLICATION_TRANSPORT]} # The agreement should point to a server that does NOT exist (invalid port) repl_agreement = master.agreement.create(suffix=DEFAULT_SUFFIX, host=master.host, port=5555, properties=properties) # # add two entries # log.info('Adding two entries...') master.add_s(Entry(('cn=entry1,dc=example,dc=com', { 'objectclass': 'top person'.split(), 'sn': 'user', 'cn': 'entry1'}))) master.add_s(Entry(('cn=entry2,dc=example,dc=com', { 'objectclass': 'top person'.split(), 'sn': 'user', 'cn': 'entry2'}))) # # export the replication ldif # log.info('Exporting replication ldif...') args = {EXPORT_REPL_INFO: True} exportTask = Tasks(master) exportTask.exportLDIF(DEFAULT_SUFFIX, None, "/tmp/export.ldif", args) # # Restart the server # log.info('Restarting server...') master.stop() master.start() # # Import the ldif # log.info('Import replication LDIF file...') importTask = Tasks(master) args = {TASK_WAIT: True} > importTask.importLDIF(DEFAULT_SUFFIX, None, "/tmp/export.ldif", args) tickets/ticket47781_test.py:85: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.tasks.Tasks object at 0x7faa560ee2e0> suffix = 'dc=example,dc=com', benamebase = None, input_file = '/tmp/export.ldif' args = {'wait': True} def importLDIF(self, suffix=None, 
benamebase=None, input_file=None, args=None): ''' Import from a LDIF format a given 'suffix' (or 'benamebase' that stores that suffix). It uses an internal task to acheive this request. If 'suffix' and 'benamebase' are specified, it uses 'benamebase' first else 'suffix'. If both 'suffix' and 'benamebase' are missing it raise ValueError 'input_file' is the ldif input file @param suffix - suffix of the backend @param benamebase - 'commonname'/'cn' of the backend (e.g. 'userRoot') @param ldif_input - file that will contain the entries in LDIF format to import @param args - is a dictionary that contains modifier of the import task wait: True/[False] - If True, 'export' waits for the completion of the task before to return @return None @raise ValueError ''' if self.conn.state != DIRSRV_STATE_ONLINE: raise ValueError("Invalid Server State %s! Must be online" % self.conn.state) # Checking the parameters if not benamebase and not suffix: raise ValueError("Specify either bename or suffix") if not input_file: raise ValueError("input_file is mandatory") if not os.path.exists(input_file): > raise ValueError("Import file (%s) does not exist" % input_file) E ValueError: Import file (/tmp/export.ldif) does not exist /usr/local/lib/python3.8/site-packages/lib389/tasks.py:473: ValueError -------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:tasks.py:567 Export task export_11092020_233324 for file /tmp/export.ldif completed successfully | |||
Failed | tickets/ticket47988_test.py::test_ticket47988_init | 7.07 | |
topology_m2 = <lib389.topologies.TopologyMain object at 0x7faa46638430> def test_ticket47988_init(topology_m2): """ It adds - Objectclass with MAY 'member' - an entry ('bind_entry') with which we bind to test the 'SELFDN' operation It deletes the anonymous aci """ _header(topology_m2, 'test_ticket47988_init') # enable acl error logging mod = [(ldap.MOD_REPLACE, 'nsslapd-errorlog-level', ensure_bytes(str(8192)))] # REPL topology_m2.ms["master1"].modify_s(DN_CONFIG, mod) topology_m2.ms["master2"].modify_s(DN_CONFIG, mod) mod = [(ldap.MOD_REPLACE, 'nsslapd-accesslog-level', ensure_bytes(str(260)))] # Internal op topology_m2.ms["master1"].modify_s(DN_CONFIG, mod) topology_m2.ms["master2"].modify_s(DN_CONFIG, mod) # add dummy entries for cpt in range(MAX_OTHERS): name = "%s%d" % (OTHER_NAME, cpt) topology_m2.ms["master1"].add_s(Entry(("cn=%s,%s" % (name, SUFFIX), { 'objectclass': "top person".split(), 'sn': name, 'cn': name}))) # check that entry 0 is replicated before loop = 0 entryDN = "cn=%s0,%s" % (OTHER_NAME, SUFFIX) while loop <= 10: try: ent = topology_m2.ms["master2"].getEntry(entryDN, ldap.SCOPE_BASE, "(objectclass=*)", ['telephonenumber']) break except ldap.NO_SUCH_OBJECT: time.sleep(1) loop += 1 assert (loop <= 10) topology_m2.ms["master1"].stop(timeout=10) topology_m2.ms["master2"].stop(timeout=10) # install the specific schema M1: ipa3.3, M2: ipa4.1 schema_file = os.path.join(topology_m2.ms["master1"].getDir(__file__, DATA_DIR), "ticket47988/schema_ipa3.3.tar.gz") _install_schema(topology_m2.ms["master1"], schema_file) schema_file = os.path.join(topology_m2.ms["master1"].getDir(__file__, DATA_DIR), "ticket47988/schema_ipa4.1.tar.gz") _install_schema(topology_m2.ms["master2"], schema_file) > topology_m2.ms["master1"].start(timeout=10) /export/tests/tickets/ticket47988_test.py:157: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/local/lib/python3.8/site-packages/lib389/__init__.py:1079: in start 
subprocess.check_output(["systemctl", "start", "dirsrv@%s" % self.serverid], stderr=subprocess.STDOUT) /usr/lib64/python3.8/subprocess.py:411: in check_output return run(*popenargs, stdout=PIPE, timeout=timeout, check=True, _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ input = None, capture_output = False, timeout = None, check = True popenargs = (['systemctl', 'start', 'dirsrv@master1'],) kwargs = {'stderr': -2, 'stdout': -1} process = <subprocess.Popen object at 0x7faa4459b9d0> stdout = b'Job for dirsrv@master1.service failed because the control process exited with error code.\nSee "systemctl status dirsrv@master1.service" and "journalctl -xe" for details.\n' stderr = None, retcode = 1 def run(*popenargs, input=None, capture_output=False, timeout=None, check=False, **kwargs): """Run command with arguments and return a CompletedProcess instance. The returned instance will have attributes args, returncode, stdout and stderr. By default, stdout and stderr are not captured, and those attributes will be None. Pass stdout=PIPE and/or stderr=PIPE in order to capture them. If check is True and the exit code was non-zero, it raises a CalledProcessError. The CalledProcessError object will have the return code in the returncode attribute, and output & stderr attributes if those streams were captured. If timeout is given, and the process takes too long, a TimeoutExpired exception will be raised. There is an optional argument "input", allowing you to pass bytes or a string to the subprocess's stdin. If you use this argument you may not also use the Popen constructor's "stdin" argument, as it will be used internally. By default, all communication is in bytes, and therefore any "input" should be bytes, and the stdout and stderr will be bytes. If in text mode, any "input" should be a string, and stdout and stderr will be strings decoded according to locale encoding, or by "encoding" if set. 
Text mode is triggered by setting any of text, encoding, errors or universal_newlines. The other arguments are the same as for the Popen constructor. """ if input is not None: if kwargs.get('stdin') is not None: raise ValueError('stdin and input arguments may not both be used.') kwargs['stdin'] = PIPE if capture_output: if kwargs.get('stdout') is not None or kwargs.get('stderr') is not None: raise ValueError('stdout and stderr arguments may not be used ' 'with capture_output.') kwargs['stdout'] = PIPE kwargs['stderr'] = PIPE with Popen(*popenargs, **kwargs) as process: try: stdout, stderr = process.communicate(input, timeout=timeout) except TimeoutExpired as exc: process.kill() if _mswindows: # Windows accumulates the output in a single blocking # read() call run on child threads, with the timeout # being done in a join() on those threads. communicate() # _after_ kill() is required to collect that and add it # to the exception. exc.stdout, exc.stderr = process.communicate() else: # POSIX _communicate already populated the output so # far into the TimeoutExpired exception. process.wait() raise except: # Including KeyboardInterrupt, communicate handled that. process.kill() # We don't call process.wait() as .__exit__ does that for us. raise retcode = process.poll() if check and retcode: > raise CalledProcessError(retcode, process.args, output=stdout, stderr=stderr) E subprocess.CalledProcessError: Command '['systemctl', 'start', 'dirsrv@master1']' returned non-zero exit status 1. /usr/lib64/python3.8/subprocess.py:512: CalledProcessError -------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. 
[32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 9c5c1f0d-b999-459a-8f3c-38870bed38a6 / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 462d6a64-33d6-44c8-b14c-b7580640301e / got description=9c5c1f0d-b999-459a-8f3c-38870bed38a6) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to 
ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47988_test.py:64 ############################################### [32mINFO [0m lib389:ticket47988_test.py:65 ####### [32mINFO [0m lib389:ticket47988_test.py:66 ####### test_ticket47988_init [32mINFO [0m lib389:ticket47988_test.py:67 ####### [32mINFO [0m lib389:ticket47988_test.py:68 ################################################### [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/02common.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/50ns-admin.ldif [32mINFO [0m lib389:ticket47988_test.py:98 replace /etc/dirsrv/slapd-master1/schema/99user.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/60nss-ldap.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/60autofs.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/50ns-web.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/60samba.ldif [32mINFO [0m 
lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/10dna-plugin.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/05rfc4523.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/60basev2.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/10automember-plugin.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/05rfc2927.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/10mep-plugin.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/60ipadns.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/10rfc2307.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/50ns-mail.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/05rfc4524.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/60trust.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/60ipaconfig.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/50ns-directory.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/60eduperson.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/60mozilla.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/65ipasudo.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/60rfc3712.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/60rfc2739.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/50ns-value.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/60acctpolicy.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add 
/etc/dirsrv/slapd-master1/schema/01core389.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/60sabayon.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/60pam-plugin.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/00core.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/25java-object.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/60sudo.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/70ipaotp.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/60pureftpd.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/61kerberos-ipav3.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/60kerberos.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/60basev3.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/06inetorgperson.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/30ns-common.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/28pilot.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/20subscriber.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/50ns-certificate.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/60posix-winsync-plugin.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/02common.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/50ns-admin.ldif [32mINFO [0m lib389:ticket47988_test.py:98 replace /etc/dirsrv/slapd-master2/schema/99user.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add 
/etc/dirsrv/slapd-master2/schema/60nss-ldap.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/60autofs.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/50ns-web.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/60samba.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/10dna-plugin.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/05rfc4523.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/60basev2.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/10automember-plugin.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/05rfc2927.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/10mep-plugin.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/60ipadns.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/10rfc2307.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/50ns-mail.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/05rfc4524.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/60trust.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/60ipaconfig.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/50ns-directory.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/60eduperson.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/60mozilla.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/65ipasudo.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/60rfc3712.ldif [32mINFO [0m 
lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/60rfc2739.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/50ns-value.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/60acctpolicy.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/01core389.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/60sabayon.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/60pam-plugin.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/00core.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/25java-object.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/60sudo.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/70ipaotp.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/60pureftpd.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/61kerberos-ipav3.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/60kerberos.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/60basev3.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/06inetorgperson.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/30ns-common.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/28pilot.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/20subscriber.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/50ns-certificate.ldif [32mINFO [0m lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/60posix-winsync-plugin.ldif | |||
Failed | tickets/ticket47988_test.py::test_ticket47988_1 | 0.00 | |
topology_m2 = <lib389.topologies.TopologyMain object at 0x7faa46638430> def test_ticket47988_1(topology_m2): ''' Check that replication is working and pause replication M2->M1 ''' _header(topology_m2, 'test_ticket47988_1') topology_m2.ms["master1"].log.debug("\n\nCheck that replication is working and pause replication M2->M1\n") > _do_update_entry(supplier=topology_m2.ms["master2"], consumer=topology_m2.ms["master1"], attempts=5) /export/tests/tickets/ticket47988_test.py:234: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /export/tests/tickets/ticket47988_test.py:184: in _do_update_entry supplier.modify_s(entryDN, mod) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:640: in modify_s return self.modify_ext_s(dn,modlist,None,None) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:613: in modify_ext_s resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:764: in result3 resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4( /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:774: in result4 ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:340: in _ldap_call reraise(exc_type, exc_value, exc_traceback) /usr/lib64/python3.8/site-packages/ldap/compat.py:46: in reraise raise exc_value _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa4660cd90> func = <built-in method result4 of LDAP object at 0x7faa46600c90> args = (26, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E ldap.SERVER_DOWN: {'result': -1, 'desc': "Can't contact LDAP server", 'ctrls': []} /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:324: SERVER_DOWN -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47988_test.py:64 ############################################### [32mINFO [0m lib389:ticket47988_test.py:65 ####### [32mINFO [0m lib389:ticket47988_test.py:66 ####### test_ticket47988_1 [32mINFO [0m lib389:ticket47988_test.py:67 ####### [32mINFO [0m lib389:ticket47988_test.py:68 ################################################### | |||
Failed | tickets/ticket47988_test.py::test_ticket47988_2 | 0.00 | |
topology_m2 = <lib389.topologies.TopologyMain object at 0x7faa46638430> def test_ticket47988_2(topology_m2): ''' Update M1 schema and trigger update M1->M2 So M1 should learn new/extended definitions that are in M2 schema ''' _header(topology_m2, 'test_ticket47988_2') topology_m2.ms["master1"].log.debug("\n\nUpdate M1 schema and an entry on M1\n") > master1_schema_csn = topology_m2.ms["master1"].schema.get_schema_csn() /export/tests/tickets/ticket47988_test.py:246: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/local/lib/python3.8/site-packages/lib389/schema.py:604: in get_schema_csn ents = self.conn.search_s(DN_SCHEMA, ldap.SCOPE_BASE, /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:870: in search_s return self.search_ext_s(base,scope,filterstr,attrlist,attrsonly,None,None,timeout=self.timeout) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:864: in search_ext_s return self.result(msgid,all=1,timeout=timeout)[1] /usr/local/lib/python3.8/site-packages/lib389/__init__.py:148: in inner objtype, data = f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:756: in result resp_type, resp_data, resp_msgid = self.result2(msgid,all,timeout) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:760: in result2 resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all,timeout) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:764: in result3 resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4( /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return 
f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:774: in result4 ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:340: in _ldap_call reraise(exc_type, exc_value, exc_traceback) /usr/lib64/python3.8/site-packages/ldap/compat.py:46: in reraise raise exc_value _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa46638460> func = <built-in method result4 of LDAP object at 0x7faa46629db0> args = (62, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E ldap.SERVER_DOWN: {'result': -1, 'desc': "Can't contact LDAP server", 'ctrls': []} /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:324: SERVER_DOWN -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47988_test.py:64 ############################################### [32mINFO [0m lib389:ticket47988_test.py:65 ####### [32mINFO [0m lib389:ticket47988_test.py:66 ####### test_ticket47988_2 [32mINFO [0m lib389:ticket47988_test.py:67 ####### [32mINFO [0m lib389:ticket47988_test.py:68 ################################################### | |||
Failed | tickets/ticket47988_test.py::test_ticket47988_3 | 0.01 | |
topology_m2 = <lib389.topologies.TopologyMain object at 0x7faa46638430> def test_ticket47988_3(topology_m2): ''' Resume replication M2->M1 and check replication is still working ''' _header(topology_m2, 'test_ticket47988_3') > _resume_M2_to_M1(topology_m2) /export/tests/tickets/ticket47988_test.py:283: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /export/tests/tickets/ticket47988_test.py:222: in _resume_M2_to_M1 ents = topology_m2.ms["master2"].agreement.list(suffix=SUFFIX) /usr/local/lib/python3.8/site-packages/lib389/agreement.py:905: in list replica_entries = self.conn.replica.list(suffix) /usr/local/lib/python3.8/site-packages/lib389/replica.py:178: in list ents = self.conn.search_s(base, ldap.SCOPE_SUBTREE, filtr) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:870: in search_s return self.search_ext_s(base,scope,filterstr,attrlist,attrsonly,None,None,timeout=self.timeout) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:863: in search_ext_s msgid = self.search_ext(base,scope,filterstr,attrlist,attrsonly,serverctrls,clientctrls,timeout,sizelimit) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:853: in search_ext return self._ldap_call( /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:340: in _ldap_call reraise(exc_type, exc_value, exc_traceback) /usr/lib64/python3.8/site-packages/ldap/compat.py:46: in reraise raise exc_value _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa4660cd90> func = <built-in method search_ext of LDAP object at 0x7faa46600c90> 
args = ('cn=mapping tree,cn=config', 2, '(&(objectclass=nsds5Replica)(nsDS5ReplicaRoot=dc=example,dc=com))', None, 0, None, ...) kwargs = {}, diagnostic_message_success = None, exc_type = None exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E ldap.SERVER_DOWN: {'result': -1, 'desc': "Can't contact LDAP server", 'ctrls': []} /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:324: SERVER_DOWN -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47988_test.py:64 ############################################### [32mINFO [0m lib389:ticket47988_test.py:65 ####### [32mINFO [0m lib389:ticket47988_test.py:66 ####### test_ticket47988_3 [32mINFO [0m lib389:ticket47988_test.py:67 ####### [32mINFO [0m lib389:ticket47988_test.py:68 ################################################### [32mINFO [0m lib389:ticket47988_test.py:221 ######################### resume RA M2->M1 ###################### | |||
Failed | tickets/ticket47988_test.py::test_ticket47988_4 | 0.00 | |
topology_m2 = <lib389.topologies.TopologyMain object at 0x7faa46638430> def test_ticket47988_4(topology_m2): ''' Check schemaCSN is identical on both server And save the nsschemaCSN to later check they do not change unexpectedly ''' _header(topology_m2, 'test_ticket47988_4') > master1_schema_csn = topology_m2.ms["master1"].schema.get_schema_csn() /export/tests/tickets/ticket47988_test.py:295: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/local/lib/python3.8/site-packages/lib389/schema.py:604: in get_schema_csn ents = self.conn.search_s(DN_SCHEMA, ldap.SCOPE_BASE, /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:870: in search_s return self.search_ext_s(base,scope,filterstr,attrlist,attrsonly,None,None,timeout=self.timeout) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:863: in search_ext_s msgid = self.search_ext(base,scope,filterstr,attrlist,attrsonly,serverctrls,clientctrls,timeout,sizelimit) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:853: in search_ext return self._ldap_call( /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:340: in _ldap_call reraise(exc_type, exc_value, exc_traceback) /usr/lib64/python3.8/site-packages/ldap/compat.py:46: in reraise raise exc_value _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa46638460> func = <built-in method search_ext of LDAP object at 0x7faa46629db0> args = ('cn=schema', 0, 'objectclass=*', ['nsSchemaCSN'], 0, None, ...) 
kwargs = {}, diagnostic_message_success = None, exc_type = None exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E ldap.SERVER_DOWN: {'result': -1, 'desc': "Can't contact LDAP server", 'ctrls': []} /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:324: SERVER_DOWN -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47988_test.py:64 ############################################### [32mINFO [0m lib389:ticket47988_test.py:65 ####### [32mINFO [0m lib389:ticket47988_test.py:66 ####### test_ticket47988_4 [32mINFO [0m lib389:ticket47988_test.py:67 ####### [32mINFO [0m lib389:ticket47988_test.py:68 ################################################### | |||
Failed | tickets/ticket47988_test.py::test_ticket47988_5 | 0.00 | |
topology_m2 = <lib389.topologies.TopologyMain object at 0x7faa46638430> def test_ticket47988_5(topology_m2): ''' Check schemaCSN do not change unexpectedly ''' _header(topology_m2, 'test_ticket47988_5') > _do_update_entry(supplier=topology_m2.ms["master1"], consumer=topology_m2.ms["master2"], attempts=5) /export/tests/tickets/ticket47988_test.py:313: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /export/tests/tickets/ticket47988_test.py:184: in _do_update_entry supplier.modify_s(entryDN, mod) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:640: in modify_s return self.modify_ext_s(dn,modlist,None,None) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:612: in modify_ext_s msgid = self.modify_ext(dn,modlist,serverctrls,clientctrls) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:609: in modify_ext return self._ldap_call(self._l.modify_ext,dn,modlist,RequestControlTuples(serverctrls),RequestControlTuples(clientctrls)) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:340: in _ldap_call reraise(exc_type, exc_value, exc_traceback) /usr/lib64/python3.8/site-packages/ldap/compat.py:46: in reraise raise exc_value _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa46638460> func = <built-in method modify_ext of LDAP object at 0x7faa46629db0> args = ('cn=other_entry0,dc=example,dc=com', [(2, 'telephonenumber', b'179')], None, None) kwargs = {}, diagnostic_message_success = None, exc_type = None exc_value = None, exc_traceback = None def 
_ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E ldap.SERVER_DOWN: {'result': -1, 'desc': "Can't contact LDAP server", 'ctrls': []} /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:324: SERVER_DOWN -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47988_test.py:64 ############################################### [32mINFO [0m lib389:ticket47988_test.py:65 ####### [32mINFO [0m lib389:ticket47988_test.py:66 ####### test_ticket47988_5 [32mINFO [0m lib389:ticket47988_test.py:67 ####### [32mINFO [0m lib389:ticket47988_test.py:68 ################################################### | |||
Failed | tickets/ticket47988_test.py::test_ticket47988_6 | 0.01 | |
topology_m2 = <lib389.topologies.TopologyMain object at 0x7faa46638430> def test_ticket47988_6(topology_m2): ''' Update M1 schema and trigger update M2->M1 So M2 should learn new/extended definitions that are in M1 schema ''' _header(topology_m2, 'test_ticket47988_6') topology_m2.ms["master1"].log.debug("\n\nUpdate M1 schema and an entry on M1\n") > master1_schema_csn = topology_m2.ms["master1"].schema.get_schema_csn() /export/tests/tickets/ticket47988_test.py:336: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/local/lib/python3.8/site-packages/lib389/schema.py:604: in get_schema_csn ents = self.conn.search_s(DN_SCHEMA, ldap.SCOPE_BASE, /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:870: in search_s return self.search_ext_s(base,scope,filterstr,attrlist,attrsonly,None,None,timeout=self.timeout) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:863: in search_ext_s msgid = self.search_ext(base,scope,filterstr,attrlist,attrsonly,serverctrls,clientctrls,timeout,sizelimit) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:853: in search_ext return self._ldap_call( /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:340: in _ldap_call reraise(exc_type, exc_value, exc_traceback) /usr/lib64/python3.8/site-packages/ldap/compat.py:46: in reraise raise exc_value _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa46638460> func = <built-in method search_ext of LDAP object at 0x7faa46629db0> args = ('cn=schema', 0, 'objectclass=*', ['nsSchemaCSN'], 0, None, ...) 
kwargs = {}, diagnostic_message_success = None, exc_type = None exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E ldap.SERVER_DOWN: {'result': -1, 'desc': "Can't contact LDAP server", 'ctrls': []} /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:324: SERVER_DOWN -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47988_test.py:64 ############################################### [32mINFO [0m lib389:ticket47988_test.py:65 ####### [32mINFO [0m lib389:ticket47988_test.py:66 ####### test_ticket47988_6 [32mINFO [0m lib389:ticket47988_test.py:67 ####### [32mINFO [0m lib389:ticket47988_test.py:68 ################################################### | |||
Failed | tickets/ticket48005_test.py::test_ticket48005_setup | 4.88 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7faa5416d130> def test_ticket48005_setup(topology_st): ''' allow dump core generate a test ldif file using dbgen.pl import the ldif ''' log.info("Ticket 48005 setup...") if hasattr(topology_st.standalone, 'prefix'): prefix = topology_st.standalone.prefix else: prefix = None sysconfig_dirsrv = os.path.join(topology_st.standalone.get_initconfig_dir(), 'dirsrv') cmdline = 'egrep "ulimit -c unlimited" %s' % sysconfig_dirsrv p = os.popen(cmdline, "r") ulimitc = p.readline() if ulimitc == "": log.info('No ulimit -c in %s' % sysconfig_dirsrv) log.info('Adding it') cmdline = 'echo "ulimit -c unlimited" >> %s' % sysconfig_dirsrv sysconfig_dirsrv_systemd = sysconfig_dirsrv + ".systemd" cmdline = 'egrep LimitCORE=infinity %s' % sysconfig_dirsrv_systemd p = os.popen(cmdline, "r") lcore = p.readline() if lcore == "": log.info('No LimitCORE in %s' % sysconfig_dirsrv_systemd) log.info('Adding it') cmdline = 'echo LimitCORE=infinity >> %s' % sysconfig_dirsrv_systemd topology_st.standalone.restart(timeout=10) ldif_file = topology_st.standalone.get_ldif_dir() + "/ticket48005.ldif" os.system('ls %s' % ldif_file) os.system('rm -f %s' % ldif_file) if hasattr(topology_st.standalone, 'prefix'): prefix = topology_st.standalone.prefix else: prefix = "" dbgen_prog = prefix + '/bin/dbgen.pl' log.info('dbgen_prog: %s' % dbgen_prog) os.system('%s -s %s -o %s -u -n 10000' % (dbgen_prog, SUFFIX, ldif_file)) cmdline = 'egrep dn: %s | wc -l' % ldif_file p = os.popen(cmdline, "r") dnnumstr = p.readline() num = int(dnnumstr) log.info("We have %d entries.\n", num) importTask = Tasks(topology_st.standalone) args = {TASK_WAIT: True} > importTask.importLDIF(SUFFIX, None, ldif_file, args) /export/tests/tickets/ticket48005_test.py:74: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.tasks.Tasks object at 0x7faa473f6fa0> suffix = 'dc=example,dc=com', benamebase = None input_file = 
'/var/lib/dirsrv/slapd-standalone1/ldif/ticket48005.ldif' args = {'wait': True} def importLDIF(self, suffix=None, benamebase=None, input_file=None, args=None): ''' Import from a LDIF format a given 'suffix' (or 'benamebase' that stores that suffix). It uses an internal task to acheive this request. If 'suffix' and 'benamebase' are specified, it uses 'benamebase' first else 'suffix'. If both 'suffix' and 'benamebase' are missing it raise ValueError 'input_file' is the ldif input file @param suffix - suffix of the backend @param benamebase - 'commonname'/'cn' of the backend (e.g. 'userRoot') @param ldif_input - file that will contain the entries in LDIF format to import @param args - is a dictionary that contains modifier of the import task wait: True/[False] - If True, 'export' waits for the completion of the task before to return @return None @raise ValueError ''' if self.conn.state != DIRSRV_STATE_ONLINE: raise ValueError("Invalid Server State %s! Must be online" % self.conn.state) # Checking the parameters if not benamebase and not suffix: raise ValueError("Specify either bename or suffix") if not input_file: raise ValueError("input_file is mandatory") if not os.path.exists(input_file): > raise ValueError("Import file (%s) does not exist" % input_file) E ValueError: Import file (/var/lib/dirsrv/slapd-standalone1/ldif/ticket48005.ldif) does not exist /usr/local/lib/python3.8/site-packages/lib389/tasks.py:473: ValueError -------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. 
------------------------------Captured stderr call------------------------------ grep: /etc/sysconfig/dirsrv: No such file or directory grep: /etc/sysconfig/dirsrv.systemd: No such file or directory ls: cannot access '/var/lib/dirsrv/slapd-standalone1/ldif/ticket48005.ldif': No such file or directory sh: /bin/dbgen.pl: No such file or directory grep: /var/lib/dirsrv/slapd-standalone1/ldif/ticket48005.ldif: No such file or directory -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket48005_test:ticket48005_test.py:31 Ticket 48005 setup... [32mINFO [0m tests.tickets.ticket48005_test:ticket48005_test.py:41 No ulimit -c in /etc/sysconfig/dirsrv [32mINFO [0m tests.tickets.ticket48005_test:ticket48005_test.py:42 Adding it [32mINFO [0m tests.tickets.ticket48005_test:ticket48005_test.py:50 No LimitCORE in /etc/sysconfig/dirsrv.systemd [32mINFO [0m tests.tickets.ticket48005_test:ticket48005_test.py:51 Adding it [32mINFO [0m tests.tickets.ticket48005_test:ticket48005_test.py:64 dbgen_prog: /bin/dbgen.pl [32mINFO [0m tests.tickets.ticket48005_test:ticket48005_test.py:70 We have 0 entries. | |||
Failed | tickets/ticket48013_test.py::test_ticket48013 | 1.99 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7faa4458bd00> def test_ticket48013(topology_st): ''' Content Synchonization: Test that invalid cookies are caught ''' cookies = ('#', '##', 'a#a#a', 'a#a#1') # Enable dynamic plugins try: topology_st.standalone.modify_s(DN_CONFIG, [(ldap.MOD_REPLACE, 'nsslapd-dynamic-plugins', b'on')]) except ldap.LDAPError as e: log.error('Failed to enable dynamic plugin! {}'.format(e.args[0]['desc'])) assert False # Enable retro changelog topology_st.standalone.plugins.enable(name=PLUGIN_RETRO_CHANGELOG) # Enbale content sync plugin > topology_st.standalone.plugins.enable(name=PLUGIN_REPL_SYNC) /export/tests/tickets/ticket48013_test.py:61: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/local/lib/python3.8/site-packages/lib389/plugins.py:2105: in enable plugin.enable() /usr/local/lib/python3.8/site-packages/lib389/plugins.py:58: in enable self.set('nsslapd-pluginEnabled', 'on') /usr/local/lib/python3.8/site-packages/lib389/_mapped_object.py:446: in set return self._instance.modify_ext_s(self._dn, [(action, key, value)], /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:613: in modify_ext_s resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:764: in result3 resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4( /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:774: in result4 ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, 
**kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:340: in _ldap_call reraise(exc_type, exc_value, exc_traceback) /usr/lib64/python3.8/site-packages/ldap/compat.py:46: in reraise raise exc_value _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa4458bee0> func = <built-in method result4 of LDAP object at 0x7faa474b1870> args = (7, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E ldap.SERVER_DOWN: {'result': -1, 'desc': "Can't contact LDAP server", 'ctrls': []} /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:324: SERVER_DOWN -------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Failed | tickets/ticket48194_test.py::test_run_1 | 7.80 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7faa44759520> def test_run_1(topology_st): """ Check nsSSL3Ciphers: +all All ciphers are enabled except null. Note: default allowWeakCipher (i.e., off) for +all """ _header(topology_st, 'Test Case 2 - Check the ciphers availability for "+all" with default allowWeakCiphers') topology_st.standalone.simple_bind_s(DN_DM, PASSWORD) topology_st.standalone.modify_s(CONFIG_DN, [(ldap.MOD_REPLACE, 'nsslapd-errorlog-level', b'64')]) # Make sure allowWeakCipher is not set. topology_st.standalone.modify_s(ENCRYPTION_DN, [(ldap.MOD_DELETE, 'allowWeakCipher', None)]) log.info("\n######################### Restarting the server ######################\n") topology_st.standalone.stop(timeout=10) os.system('mv %s %s.48194_0' % (topology_st.standalone.errlog, topology_st.standalone.errlog)) os.system('touch %s' % (topology_st.standalone.errlog)) time.sleep(2) topology_st.standalone.start(timeout=120) > connectWithOpenssl(topology_st, 'DES-CBC3-SHA', False) /export/tests/tickets/ticket48194_test.py:158: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ topology_st = <lib389.topologies.TopologyMain object at 0x7faa44759520> cipher = 'DES-CBC3-SHA', expect = False def connectWithOpenssl(topology_st, cipher, expect): """ Connect with the given cipher Condition: If expect is True, the handshake should be successful. If expect is False, the handshake should be refused with access log: "Cannot communicate securely with peer: no common encryption algorithm(s)." 
""" log.info("Testing %s -- expect to handshake %s", cipher, "successfully" if expect else "failed") myurl = 'localhost:%s' % LDAPSPORT cmdline = ['/usr/bin/openssl', 's_client', '-connect', myurl, '-cipher', cipher] strcmdline = " ".join(cmdline) log.info("Running cmdline: %s", strcmdline) try: proc = subprocess.Popen(cmdline, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT) except ValueError: log.info("%s failed: %s", cmdline, ValueError) proc.kill() while True: l = proc.stdout.readline() if l == b"": break if b'Cipher is' in l: log.info("Found: %s", l) if expect: if b'(NONE)' in l: assert False else: proc.stdin.close() assert True else: if b'(NONE)' in l: assert True else: proc.stdin.close() > assert False E assert False /export/tests/tickets/ticket48194_test.py:117: AssertionError -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket48194_test.py:40 ############################################### [32mINFO [0m lib389:ticket48194_test.py:41 ####### Test Case 2 - Check the ciphers availability for "+all" with default allowWeakCiphers [32mINFO [0m lib389:ticket48194_test.py:42 ############################################### [32mINFO [0m lib389.utils:ticket48194_test.py:151 ######################### Restarting the server ###################### [32mINFO [0m lib389.utils:ticket48194_test.py:86 Testing DES-CBC3-SHA -- expect to handshake failed [32mINFO [0m lib389.utils:ticket48194_test.py:92 Running cmdline: /usr/bin/openssl s_client -connect localhost:63601 -cipher DES-CBC3-SHA [32mINFO [0m lib389.utils:ticket48194_test.py:105 Found: b'New, TLSv1.3, Cipher is TLS_AES_128_GCM_SHA256\n' | |||
Failed | tickets/ticket48194_test.py::test_run_2 | 7.54 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7faa44759520> def test_run_2(topology_st): """ Check nsSSL3Ciphers: +rsa_aes_128_sha,+rsa_aes_256_sha rsa_aes_128_sha, tls_rsa_aes_128_sha, rsa_aes_256_sha, tls_rsa_aes_256_sha are enabled. default allowWeakCipher """ _header(topology_st, 'Test Case 3 - Check the ciphers availability for "+rsa_aes_128_sha,+rsa_aes_256_sha" with default allowWeakCipher') topology_st.standalone.simple_bind_s(DN_DM, PASSWORD) topology_st.standalone.modify_s(ENCRYPTION_DN, [(ldap.MOD_REPLACE, 'nsSSL3Ciphers', b'+rsa_aes_128_sha,+rsa_aes_256_sha')]) log.info("\n######################### Restarting the server ######################\n") topology_st.standalone.stop(timeout=10) os.system('mv %s %s.48194_1' % (topology_st.standalone.errlog, topology_st.standalone.errlog)) os.system('touch %s' % (topology_st.standalone.errlog)) time.sleep(2) topology_st.standalone.start(timeout=120) connectWithOpenssl(topology_st, 'DES-CBC3-SHA', False) connectWithOpenssl(topology_st, 'AES256-SHA256', False) > connectWithOpenssl(topology_st, 'AES128-SHA', True) /export/tests/tickets/ticket48194_test.py:184: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ topology_st = <lib389.topologies.TopologyMain object at 0x7faa44759520> cipher = 'AES128-SHA', expect = True def connectWithOpenssl(topology_st, cipher, expect): """ Connect with the given cipher Condition: If expect is True, the handshake should be successful. If expect is False, the handshake should be refused with access log: "Cannot communicate securely with peer: no common encryption algorithm(s)." 
""" log.info("Testing %s -- expect to handshake %s", cipher, "successfully" if expect else "failed") myurl = 'localhost:%s' % LDAPSPORT cmdline = ['/usr/bin/openssl', 's_client', '-connect', myurl, '-cipher', cipher] strcmdline = " ".join(cmdline) log.info("Running cmdline: %s", strcmdline) try: proc = subprocess.Popen(cmdline, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT) except ValueError: log.info("%s failed: %s", cmdline, ValueError) proc.kill() while True: l = proc.stdout.readline() if l == b"": break if b'Cipher is' in l: log.info("Found: %s", l) if expect: if b'(NONE)' in l: > assert False E assert False /export/tests/tickets/ticket48194_test.py:108: AssertionError -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket48194_test.py:40 ############################################### [32mINFO [0m lib389:ticket48194_test.py:41 ####### Test Case 3 - Check the ciphers availability for "+rsa_aes_128_sha,+rsa_aes_256_sha" with default allowWeakCipher [32mINFO [0m lib389:ticket48194_test.py:42 ############################################### [32mINFO [0m lib389.utils:ticket48194_test.py:175 ######################### Restarting the server ###################### [32mINFO [0m lib389.utils:ticket48194_test.py:86 Testing DES-CBC3-SHA -- expect to handshake failed [32mINFO [0m lib389.utils:ticket48194_test.py:92 Running cmdline: /usr/bin/openssl s_client -connect localhost:63601 -cipher DES-CBC3-SHA [32mINFO [0m lib389.utils:ticket48194_test.py:105 Found: b'New, (NONE), Cipher is (NONE)\n' [32mINFO [0m lib389.utils:ticket48194_test.py:86 Testing AES256-SHA256 -- expect to handshake failed [32mINFO [0m lib389.utils:ticket48194_test.py:92 Running cmdline: /usr/bin/openssl s_client -connect localhost:63601 -cipher AES256-SHA256 [32mINFO [0m lib389.utils:ticket48194_test.py:105 Found: b'New, (NONE), Cipher is (NONE)\n' [32mINFO [0m lib389.utils:ticket48194_test.py:86 Testing AES128-SHA -- expect to 
handshake successfully [32mINFO [0m lib389.utils:ticket48194_test.py:92 Running cmdline: /usr/bin/openssl s_client -connect localhost:63601 -cipher AES128-SHA [32mINFO [0m lib389.utils:ticket48194_test.py:105 Found: b'New, (NONE), Cipher is (NONE)\n' | |||
Failed | tickets/ticket48194_test.py::test_run_4 | 7.71 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7faa44759520> def test_run_4(topology_st): """ Check no nsSSL3Ciphers Default ciphers are enabled. default allowWeakCipher """ _header(topology_st, 'Test Case 5 - Check no nsSSL3Ciphers (-all) with default allowWeakCipher') topology_st.standalone.simple_bind_s(DN_DM, PASSWORD) topology_st.standalone.modify_s(ENCRYPTION_DN, [(ldap.MOD_DELETE, 'nsSSL3Ciphers', b'-all')]) log.info("\n######################### Restarting the server ######################\n") topology_st.standalone.stop(timeout=10) os.system('mv %s %s.48194_3' % (topology_st.standalone.errlog, topology_st.standalone.errlog)) os.system('touch %s' % (topology_st.standalone.errlog)) time.sleep(2) topology_st.standalone.start(timeout=120) > connectWithOpenssl(topology_st, 'DES-CBC3-SHA', False) /export/tests/tickets/ticket48194_test.py:228: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ topology_st = <lib389.topologies.TopologyMain object at 0x7faa44759520> cipher = 'DES-CBC3-SHA', expect = False def connectWithOpenssl(topology_st, cipher, expect): """ Connect with the given cipher Condition: If expect is True, the handshake should be successful. If expect is False, the handshake should be refused with access log: "Cannot communicate securely with peer: no common encryption algorithm(s)." 
""" log.info("Testing %s -- expect to handshake %s", cipher, "successfully" if expect else "failed") myurl = 'localhost:%s' % LDAPSPORT cmdline = ['/usr/bin/openssl', 's_client', '-connect', myurl, '-cipher', cipher] strcmdline = " ".join(cmdline) log.info("Running cmdline: %s", strcmdline) try: proc = subprocess.Popen(cmdline, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT) except ValueError: log.info("%s failed: %s", cmdline, ValueError) proc.kill() while True: l = proc.stdout.readline() if l == b"": break if b'Cipher is' in l: log.info("Found: %s", l) if expect: if b'(NONE)' in l: assert False else: proc.stdin.close() assert True else: if b'(NONE)' in l: assert True else: proc.stdin.close() > assert False E assert False /export/tests/tickets/ticket48194_test.py:117: AssertionError -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket48194_test.py:40 ############################################### [32mINFO [0m lib389:ticket48194_test.py:41 ####### Test Case 5 - Check no nsSSL3Ciphers (-all) with default allowWeakCipher [32mINFO [0m lib389:ticket48194_test.py:42 ############################################### [32mINFO [0m lib389.utils:ticket48194_test.py:221 ######################### Restarting the server ###################### [32mINFO [0m lib389.utils:ticket48194_test.py:86 Testing DES-CBC3-SHA -- expect to handshake failed [32mINFO [0m lib389.utils:ticket48194_test.py:92 Running cmdline: /usr/bin/openssl s_client -connect localhost:63601 -cipher DES-CBC3-SHA [32mINFO [0m lib389.utils:ticket48194_test.py:105 Found: b'New, TLSv1.3, Cipher is TLS_AES_128_GCM_SHA256\n' | |||
Failed | tickets/ticket48194_test.py::test_run_5 | 7.71 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7faa44759520> def test_run_5(topology_st): """ Check nsSSL3Ciphers: default Default ciphers are enabled. default allowWeakCipher """ _header(topology_st, 'Test Case 6 - Check default nsSSL3Ciphers (default setting) with default allowWeakCipher') topology_st.standalone.simple_bind_s(DN_DM, PASSWORD) topology_st.standalone.modify_s(ENCRYPTION_DN, [(ldap.MOD_REPLACE, 'nsSSL3Ciphers', b'default')]) log.info("\n######################### Restarting the server ######################\n") topology_st.standalone.stop(timeout=10) os.system('mv %s %s.48194_4' % (topology_st.standalone.errlog, topology_st.standalone.errlog)) os.system('touch %s' % (topology_st.standalone.errlog)) time.sleep(2) topology_st.standalone.start(timeout=120) > connectWithOpenssl(topology_st, 'DES-CBC3-SHA', False) /export/tests/tickets/ticket48194_test.py:250: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ topology_st = <lib389.topologies.TopologyMain object at 0x7faa44759520> cipher = 'DES-CBC3-SHA', expect = False def connectWithOpenssl(topology_st, cipher, expect): """ Connect with the given cipher Condition: If expect is True, the handshake should be successful. If expect is False, the handshake should be refused with access log: "Cannot communicate securely with peer: no common encryption algorithm(s)." 
""" log.info("Testing %s -- expect to handshake %s", cipher, "successfully" if expect else "failed") myurl = 'localhost:%s' % LDAPSPORT cmdline = ['/usr/bin/openssl', 's_client', '-connect', myurl, '-cipher', cipher] strcmdline = " ".join(cmdline) log.info("Running cmdline: %s", strcmdline) try: proc = subprocess.Popen(cmdline, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT) except ValueError: log.info("%s failed: %s", cmdline, ValueError) proc.kill() while True: l = proc.stdout.readline() if l == b"": break if b'Cipher is' in l: log.info("Found: %s", l) if expect: if b'(NONE)' in l: assert False else: proc.stdin.close() assert True else: if b'(NONE)' in l: assert True else: proc.stdin.close() > assert False E assert False /export/tests/tickets/ticket48194_test.py:117: AssertionError -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket48194_test.py:40 ############################################### [32mINFO [0m lib389:ticket48194_test.py:41 ####### Test Case 6 - Check default nsSSL3Ciphers (default setting) with default allowWeakCipher [32mINFO [0m lib389:ticket48194_test.py:42 ############################################### [32mINFO [0m lib389.utils:ticket48194_test.py:243 ######################### Restarting the server ###################### [32mINFO [0m lib389.utils:ticket48194_test.py:86 Testing DES-CBC3-SHA -- expect to handshake failed [32mINFO [0m lib389.utils:ticket48194_test.py:92 Running cmdline: /usr/bin/openssl s_client -connect localhost:63601 -cipher DES-CBC3-SHA [32mINFO [0m lib389.utils:ticket48194_test.py:105 Found: b'New, TLSv1.3, Cipher is TLS_AES_128_GCM_SHA256\n' | |||
Failed | tickets/ticket48194_test.py::test_run_6 | 8.60 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7faa44759520> def test_run_6(topology_st): """ Check nsSSL3Ciphers: +all,-TLS_RSA_WITH_AES_256_CBC_SHA256 All ciphers are disabled. default allowWeakCipher """ _header(topology_st, 'Test Case 7 - Check nsSSL3Ciphers: +all,-TLS_RSA_WITH_AES_256_CBC_SHA256 with default allowWeakCipher') topology_st.standalone.simple_bind_s(DN_DM, PASSWORD) topology_st.standalone.modify_s(ENCRYPTION_DN, [(ldap.MOD_REPLACE, 'nsSSL3Ciphers', b'+all,-TLS_RSA_WITH_AES_256_CBC_SHA256')]) log.info("\n######################### Restarting the server ######################\n") topology_st.standalone.stop(timeout=10) os.system('mv %s %s.48194_5' % (topology_st.standalone.errlog, topology_st.standalone.errlog)) os.system('touch %s' % (topology_st.standalone.errlog)) time.sleep(2) topology_st.standalone.start(timeout=120) > connectWithOpenssl(topology_st, 'DES-CBC3-SHA', False) /export/tests/tickets/ticket48194_test.py:274: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ topology_st = <lib389.topologies.TopologyMain object at 0x7faa44759520> cipher = 'DES-CBC3-SHA', expect = False def connectWithOpenssl(topology_st, cipher, expect): """ Connect with the given cipher Condition: If expect is True, the handshake should be successful. If expect is False, the handshake should be refused with access log: "Cannot communicate securely with peer: no common encryption algorithm(s)." 
""" log.info("Testing %s -- expect to handshake %s", cipher, "successfully" if expect else "failed") myurl = 'localhost:%s' % LDAPSPORT cmdline = ['/usr/bin/openssl', 's_client', '-connect', myurl, '-cipher', cipher] strcmdline = " ".join(cmdline) log.info("Running cmdline: %s", strcmdline) try: proc = subprocess.Popen(cmdline, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT) except ValueError: log.info("%s failed: %s", cmdline, ValueError) proc.kill() while True: l = proc.stdout.readline() if l == b"": break if b'Cipher is' in l: log.info("Found: %s", l) if expect: if b'(NONE)' in l: assert False else: proc.stdin.close() assert True else: if b'(NONE)' in l: assert True else: proc.stdin.close() > assert False E assert False /export/tests/tickets/ticket48194_test.py:117: AssertionError -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket48194_test.py:40 ############################################### [32mINFO [0m lib389:ticket48194_test.py:41 ####### Test Case 7 - Check nsSSL3Ciphers: +all,-TLS_RSA_WITH_AES_256_CBC_SHA256 with default allowWeakCipher [32mINFO [0m lib389:ticket48194_test.py:42 ############################################### [32mINFO [0m lib389.utils:ticket48194_test.py:267 ######################### Restarting the server ###################### [32mINFO [0m lib389.utils:ticket48194_test.py:86 Testing DES-CBC3-SHA -- expect to handshake failed [32mINFO [0m lib389.utils:ticket48194_test.py:92 Running cmdline: /usr/bin/openssl s_client -connect localhost:63601 -cipher DES-CBC3-SHA [32mINFO [0m lib389.utils:ticket48194_test.py:105 Found: b'New, TLSv1.3, Cipher is TLS_AES_128_GCM_SHA256\n' | |||
Failed | tickets/ticket48194_test.py::test_run_8 | 8.10 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7faa44759520> def test_run_8(topology_st): """ Check nsSSL3Ciphers: default + allowWeakCipher: off Strong Default ciphers are enabled. """ _header(topology_st, 'Test Case 9 - Check default nsSSL3Ciphers (default setting + allowWeakCipher: off)') topology_st.standalone.simple_bind_s(DN_DM, PASSWORD) topology_st.standalone.modify_s(ENCRYPTION_DN, [(ldap.MOD_REPLACE, 'nsSSL3Ciphers', b'default'), (ldap.MOD_REPLACE, 'allowWeakCipher', b'off')]) log.info("\n######################### Restarting the server ######################\n") topology_st.standalone.stop(timeout=10) os.system('mv %s %s.48194_7' % (topology_st.standalone.errlog, topology_st.standalone.errlog)) os.system('touch %s' % (topology_st.standalone.errlog)) time.sleep(2) topology_st.standalone.start(timeout=120) > connectWithOpenssl(topology_st, 'DES-CBC3-SHA', False) /export/tests/tickets/ticket48194_test.py:297: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ topology_st = <lib389.topologies.TopologyMain object at 0x7faa44759520> cipher = 'DES-CBC3-SHA', expect = False def connectWithOpenssl(topology_st, cipher, expect): """ Connect with the given cipher Condition: If expect is True, the handshake should be successful. If expect is False, the handshake should be refused with access log: "Cannot communicate securely with peer: no common encryption algorithm(s)." 
""" log.info("Testing %s -- expect to handshake %s", cipher, "successfully" if expect else "failed") myurl = 'localhost:%s' % LDAPSPORT cmdline = ['/usr/bin/openssl', 's_client', '-connect', myurl, '-cipher', cipher] strcmdline = " ".join(cmdline) log.info("Running cmdline: %s", strcmdline) try: proc = subprocess.Popen(cmdline, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT) except ValueError: log.info("%s failed: %s", cmdline, ValueError) proc.kill() while True: l = proc.stdout.readline() if l == b"": break if b'Cipher is' in l: log.info("Found: %s", l) if expect: if b'(NONE)' in l: assert False else: proc.stdin.close() assert True else: if b'(NONE)' in l: assert True else: proc.stdin.close() > assert False E assert False /export/tests/tickets/ticket48194_test.py:117: AssertionError -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket48194_test.py:40 ############################################### [32mINFO [0m lib389:ticket48194_test.py:41 ####### Test Case 9 - Check default nsSSL3Ciphers (default setting + allowWeakCipher: off) [32mINFO [0m lib389:ticket48194_test.py:42 ############################################### [32mINFO [0m lib389.utils:ticket48194_test.py:290 ######################### Restarting the server ###################### [32mINFO [0m lib389.utils:ticket48194_test.py:86 Testing DES-CBC3-SHA -- expect to handshake failed [32mINFO [0m lib389.utils:ticket48194_test.py:92 Running cmdline: /usr/bin/openssl s_client -connect localhost:63601 -cipher DES-CBC3-SHA [32mINFO [0m lib389.utils:ticket48194_test.py:105 Found: b'New, TLSv1.3, Cipher is TLS_AES_128_GCM_SHA256\n' | |||
Failed | tickets/ticket48228_test.py::test_ticket48228_test_global_policy | 1.32 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7faa541962e0> user = 'uid=user1,dc=example,dc=com', passwd = 'password', times = 6 def update_passwd(topology_st, user, passwd, times): # Set the default value cpw = passwd for i in range(times): log.info(" Bind as {%s,%s}" % (user, cpw)) topology_st.standalone.simple_bind_s(user, cpw) # Now update the value for this iter. cpw = 'password%d' % i try: > topology_st.standalone.modify_s(user, [(ldap.MOD_REPLACE, 'userpassword', cpw.encode())]) /export/tests/tickets/ticket48228_test.py:136: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = ('uid=user1,dc=example,dc=com', [(2, 'userpassword', b'password0')]) kwargs = {} c_stack = [FrameInfo(frame=<frame at 0x55ebeb7cbdb0, file '/usr/local/lib/python3.8/site-packages/lib389/__init__.py', line 180,...mbda>', code_context=[' self._inner_hookexec = lambda hook, methods, kwargs: hook.multicall(\n'], index=0), ...] frame = FrameInfo(frame=<frame at 0x55ebeb22b0a0, file '/export/tests/tickets/ticket48228_test.py', line 141, code update_pass...t=[" topology_st.standalone.modify_s(user, [(ldap.MOD_REPLACE, 'userpassword', cpw.encode())])\n"], index=0) def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. 
elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa541963a0> dn = 'uid=user1,dc=example,dc=com' modlist = [(2, 'userpassword', b'password0')] def modify_s(self,dn,modlist): > return self.modify_ext_s(dn,modlist,None,None) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:640: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = ('uid=user1,dc=example,dc=com', [(2, 'userpassword', b'password0')], None, None) kwargs = {} def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. 
This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa541963a0> dn = 'uid=user1,dc=example,dc=com' modlist = [(2, 'userpassword', b'password0')], serverctrls = None clientctrls = None def modify_ext_s(self,dn,modlist,serverctrls=None,clientctrls=None): msgid = self.modify_ext(dn,modlist,serverctrls,clientctrls) > resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:613: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = (10,), kwargs = {'all': 1, 'timeout': -1} def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 
'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa541963a0>, msgid = 10, all = 1 timeout = -1, resp_ctrl_classes = None def result3(self,msgid=ldap.RES_ANY,all=1,timeout=None,resp_ctrl_classes=None): > resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4( msgid,all,timeout, 
add_ctrls=0,add_intermediates=0,add_extop=0, resp_ctrl_classes=resp_ctrl_classes ) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:764: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = (10, 1, -1) kwargs = {'add_ctrls': 0, 'add_extop': 0, 'add_intermediates': 0, 'resp_ctrl_classes': None} def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: _ 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa541963a0>, msgid = 10, all = 1 timeout = -1, add_ctrls = 0, add_intermediates = 0, add_extop = 0 resp_ctrl_classes = None def result4(self,msgid=ldap.RES_ANY,all=1,timeout=None,add_ctrls=0,add_intermediates=0,add_extop=0,resp_ctrl_classes=None): if timeout is None: timeout = self.timeout > ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:774: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = (<built-in method result4 of LDAP object at 0x7faa5422a0f0>, 10, 1, -1, 0, 0, ...) kwargs = {} def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. 
elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa541963a0> func = <built-in method result4 of LDAP object at 0x7faa5422a0f0> args = (10, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: result = func(*args,**kwargs) if __debug__ and self._trace_level>=2: if func.__name__!="unbind_ext": 
diagnostic_message_success = self._l.get_option(ldap.OPT_DIAGNOSTIC_MESSAGE) finally: self._ldap_object_lock.release() except LDAPError as e: exc_type,exc_value,exc_traceback = sys.exc_info() try: if 'info' not in e.args[0] and 'errno' in e.args[0]: e.args[0]['info'] = strerror(e.args[0]['errno']) except IndexError: pass if __debug__ and self._trace_level>=2: self._trace_file.write('=> LDAPError - %s: %s\n' % (e.__class__.__name__,str(e))) try: > reraise(exc_type, exc_value, exc_traceback) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:340: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ exc_type = <class 'ldap.INSUFFICIENT_ACCESS'> exc_value = INSUFFICIENT_ACCESS({'msgtype': 103, 'msgid': 10, 'result': 50, 'desc': 'Insufficient access', 'ctrls': [], 'info': "Insufficient 'write' privilege to the 'userPassword' attribute of entry 'uid=user1,dc=example,dc=com'.\n"}) exc_traceback = <traceback object at 0x7faa5411d880> def reraise(exc_type, exc_value, exc_traceback): """Re-raise an exception given information from sys.exc_info() Note that unlike six.reraise, this does not support replacing the traceback. All arguments must come from a single sys.exc_info() call. """ # In Python 3, all exception info is contained in one object. 
> raise exc_value /usr/lib64/python3.8/site-packages/ldap/compat.py:46: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa541963a0> func = <built-in method result4 of LDAP object at 0x7faa5422a0f0> args = (10, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E ldap.INSUFFICIENT_ACCESS: {'msgtype': 103, 'msgid': 10, 'result': 50, 'desc': 'Insufficient access', 'ctrls': [], 'info': "Insufficient 'write' privilege to the 'userPassword' attribute of entry 'uid=user1,dc=example,dc=com'.\n"} /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:324: INSUFFICIENT_ACCESS During handling of the above exception, another exception occurred: topology_st = <lib389.topologies.TopologyMain object at 0x7faa541962e0> def test_ticket48228_test_global_policy(topology_st): """ Check global password policy """ log.info(' Set inhistory = 6') set_global_pwpolicy(topology_st, 6) log.info(' Bind as directory manager') log.info("Bind as %s" % DN_DM) topology_st.standalone.simple_bind_s(DN_DM, PASSWORD) log.info(' Add an entry' + USER1_DN) try: topology_st.standalone.add_s( Entry((USER1_DN, {'objectclass': "top person organizationalPerson inetOrgPerson".split(), 'sn': '1', 'cn': 'user 1', 'uid': 'user1', 'givenname': 'user', 'mail': 'user1@example.com', 'userpassword': 'password'}))) except ldap.LDAPError as e: 
log.fatal('test_ticket48228: Failed to add user' + USER1_DN + ': error ' + e.message['desc']) assert False log.info(' Update the password of ' + USER1_DN + ' 6 times') > update_passwd(topology_st, USER1_DN, 'password', 6) /export/tests/tickets/ticket48228_test.py:174: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ topology_st = <lib389.topologies.TopologyMain object at 0x7faa541962e0> user = 'uid=user1,dc=example,dc=com', passwd = 'password', times = 6 def update_passwd(topology_st, user, passwd, times): # Set the default value cpw = passwd for i in range(times): log.info(" Bind as {%s,%s}" % (user, cpw)) topology_st.standalone.simple_bind_s(user, cpw) # Now update the value for this iter. cpw = 'password%d' % i try: topology_st.standalone.modify_s(user, [(ldap.MOD_REPLACE, 'userpassword', cpw.encode())]) except ldap.LDAPError as e: log.fatal( > 'test_ticket48228: Failed to update the password ' + cpw + ' of user ' + user + ': error ' + e.message[ 'desc']) E AttributeError: 'INSUFFICIENT_ACCESS' object has no attribute 'message' /export/tests/tickets/ticket48228_test.py:139: AttributeError -------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Failed | tickets/ticket48234_test.py::test_ticket48234 | 0.27 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7faa5410df70> def test_ticket48234(topology_st): """ Test aci which contains an extensible filter. shutdown """ log.info('Bind as root DN') try: topology_st.standalone.simple_bind_s(DN_DM, PASSWORD) except ldap.LDAPError as e: topology_st.standalone.log.error('Root DN failed to authenticate: ' + e.args[0]['desc']) assert False ouname = 'outest' username = 'admin' passwd = 'Password' deniedattr = 'telephonenumber' log.info('Add aci which contains extensible filter.') aci_text = ('(targetattr = "%s")' % (deniedattr) + '(target = "ldap:///%s")' % (DEFAULT_SUFFIX) + '(version 3.0;acl "admin-tel-matching-rule-outest";deny (all)' + '(userdn = "ldap:///%s??sub?(&(cn=%s)(ou:dn:=%s))");)' % (DEFAULT_SUFFIX, username, ouname)) try: topology_st.standalone.modify_s(DEFAULT_SUFFIX, [(ldap.MOD_ADD, 'aci', ensure_bytes(aci_text))]) except ldap.LDAPError as e: log.error('Failed to add aci: (%s) error %s' % (aci_text, e.args[0]['desc'])) assert False log.info('Add entries ...') for idx in range(0, 2): ou0 = 'OU%d' % idx log.info('adding %s under %s...' % (ou0, DEFAULT_SUFFIX)) add_ou_entry(topology_st.standalone, ou0, DEFAULT_SUFFIX) parent = 'ou=%s,%s' % (ou0, DEFAULT_SUFFIX) log.info('adding %s under %s...' % (ouname, parent)) add_ou_entry(topology_st.standalone, ouname, parent) for idx in range(0, 2): parent = 'ou=%s,ou=OU%d,%s' % (ouname, idx, DEFAULT_SUFFIX) log.info('adding %s under %s...' 
% (username, parent)) add_user_entry(topology_st.standalone, username, passwd, parent) binddn = 'cn=%s,%s' % (username, parent) log.info('Bind as user %s' % binddn) try: topology_st.standalone.simple_bind_s(binddn, passwd) except ldap.LDAPError as e: topology_st.standalone.log.error(bindn + ' failed to authenticate: ' + e.args[0]['desc']) assert False filter = '(cn=%s)' % username try: entries = topology_st.standalone.search_s(DEFAULT_SUFFIX, ldap.SCOPE_SUBTREE, filter, [deniedattr, 'dn']) > assert 2 == len(entries) E assert 2 == 0 E +2 E -0 /export/tests/tickets/ticket48234_test.py:83: AssertionError -------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket48234_test:ticket48234_test.py:35 Bind as root DN [32mINFO [0m tests.tickets.ticket48234_test:ticket48234_test.py:46 Add aci which contains extensible filter. [32mINFO [0m tests.tickets.ticket48234_test:ticket48234_test.py:58 Add entries ... [32mINFO [0m tests.tickets.ticket48234_test:ticket48234_test.py:61 adding OU0 under dc=example,dc=com... [32mINFO [0m tests.tickets.ticket48234_test:ticket48234_test.py:64 adding outest under ou=OU0,dc=example,dc=com... [32mINFO [0m tests.tickets.ticket48234_test:ticket48234_test.py:61 adding OU1 under dc=example,dc=com... [32mINFO [0m tests.tickets.ticket48234_test:ticket48234_test.py:64 adding outest under ou=OU1,dc=example,dc=com... [32mINFO [0m tests.tickets.ticket48234_test:ticket48234_test.py:69 adding admin under ou=outest,ou=OU0,dc=example,dc=com... 
[32mINFO [0m tests.tickets.ticket48234_test:ticket48234_test.py:69 adding admin under ou=outest,ou=OU1,dc=example,dc=com... [32mINFO [0m tests.tickets.ticket48234_test:ticket48234_test.py:73 Bind as user cn=admin,ou=outest,ou=OU1,dc=example,dc=com | |||
Failed | tickets/ticket48266_test.py::test_ticket48266_count_csn_evaluation | 0.26 | |
topology_m2 = <lib389.topologies.TopologyMain object at 0x7faa47657280> entries = None def test_ticket48266_count_csn_evaluation(topology_m2, entries): ents = topology_m2.ms["master1"].agreement.list(suffix=SUFFIX) assert len(ents) == 1 > first_csn = _get_first_not_replicated_csn(topology_m2) /export/tests/tickets/ticket48266_test.py:176: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ topology_m2 = <lib389.topologies.TopologyMain object at 0x7faa47657280> def _get_first_not_replicated_csn(topology_m2): name = "cn=%s2,%s" % (NEW_ACCOUNT, SUFFIX) # read the first CSN that will not be replicated mod = [(ldap.MOD_REPLACE, 'telephonenumber', ensure_bytes('123456'))] topology_m2.ms["master1"].modify_s(name, mod) msgid = topology_m2.ms["master1"].search_ext(name, ldap.SCOPE_SUBTREE, 'objectclass=*', ['nscpentrywsi']) rtype, rdata, rmsgid = topology_m2.ms["master1"].result2(msgid) attrs = None for dn, raw_attrs in rdata: topology_m2.ms["master1"].log.info("dn: %s" % dn) if 'nscpentrywsi' in raw_attrs: attrs = raw_attrs['nscpentrywsi'] assert attrs for attr in attrs: if ensure_str(attr.lower()).startswith('telephonenumber'): break assert attr log.info("############# %s " % name) # now retrieve the CSN of the operation we are looking for csn = None found_ops = topology_m2.ms['master1'].ds_access_log.match(".*MOD dn=\"%s\".*" % name) assert(len(found_ops) > 0) found_op = topology_m2.ms['master1'].ds_access_log.parse_line(found_ops[-1]) log.info(found_op) # Now look for the related CSN found_csns = topology_m2.ms['master1'].ds_access_log.match(".*conn=%s op=%s RESULT.*" % (found_op['conn'], found_op['op'])) assert(len(found_csns) > 0) found_csn = topology_m2.ms['master1'].ds_access_log.parse_line(found_csns[-1]) log.info(found_csn) > return found_csn['csn'] E KeyError: 'csn' /export/tests/tickets/ticket48266_test.py:147: KeyError -------------------------------Captured log call-------------------------------- [32mINFO [0m 
lib389:ticket48266_test.py:125 dn: cn=new_account2,dc=example,dc=com [32mINFO [0m tests.tickets.ticket48266_test:ticket48266_test.py:134 ############# cn=new_account2,dc=example,dc=com [32mINFO [0m tests.tickets.ticket48266_test:ticket48266_test.py:140 {'action': 'MOD', 'timestamp': '[09/Nov/2020:23:54:44.163528068 -0500]', 'conn': '1', 'op': '11', 'rem': 'dn="cn=new_account2,dc=example,dc=com"', 'datetime': datetime.datetime(2020, 11, 9, 23, 0, 0, 163528, tzinfo=tzoffset(None, -18000))} [32mINFO [0m tests.tickets.ticket48266_test:ticket48266_test.py:146 {'action': 'RESULT', 'timestamp': '[09/Nov/2020:23:54:44.292159122 -0500]', 'conn': '1', 'op': '11', 'rem': 'err=0 tag=103 nentries=0 wtime=0.002654887 optime=0.128666261 etime=0.131311408 csn=5faa1d14000000010000', 'datetime': datetime.datetime(2020, 11, 9, 23, 0, 0, 292159, tzinfo=tzoffset(None, -18000))} | |||
Failed | tickets/ticket48325_test.py::test_ticket48325 | 0.05 | |
topology_m1h1c1 = <lib389.topologies.TopologyMain object at 0x7faa5422a490> def test_ticket48325(topology_m1h1c1): """ Test that the RUV element order is correctly maintained when promoting a hub or consumer. """ # # Promote consumer to master # C1 = topology_m1h1c1.cs["consumer1"] M1 = topology_m1h1c1.ms["master1"] H1 = topology_m1h1c1.hs["hub1"] repl = ReplicationManager(DEFAULT_SUFFIX) > repl._ensure_changelog(C1) /export/tests/tickets/ticket48325_test.py:53: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/local/lib/python3.8/site-packages/lib389/replica.py:1928: in _ensure_changelog cl.create(properties={ /usr/local/lib/python3.8/site-packages/lib389/_mapped_object.py:971: in create return self._create(rdn, properties, basedn, ensure=False) /usr/local/lib/python3.8/site-packages/lib389/_mapped_object.py:946: in _create self._instance.add_ext_s(e, serverctrls=self._server_controls, clientctrls=self._client_controls, escapehatch='i am sure') /usr/local/lib/python3.8/site-packages/lib389/__init__.py:176: in inner return f(ent.dn, ent.toTupleList(), *args[2:]) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:425: in add_ext_s resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:764: in result3 resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4( /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:774: in result4 ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:340: in _ldap_call reraise(exc_type, 
exc_value, exc_traceback) /usr/lib64/python3.8/site-packages/ldap/compat.py:46: in reraise raise exc_value _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa44768790> func = <built-in method result4 of LDAP object at 0x7faa44749450> args = (15, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E ldap.UNWILLING_TO_PERFORM: {'msgtype': 105, 'msgid': 15, 'result': 53, 'desc': 'Server is unwilling to perform', 'ctrls': [], 'info': 'Changelog configuration is part of the backend configuration'} /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:324: UNWILLING_TO_PERFORM -------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for hub1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39101, 'ldap-secureport': 63801, 'server-id': 'hub1', 'suffix': 'dc=example,dc=com'} was created. 
[32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for consumer1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39201, 'ldap-secureport': 63901, 'server-id': 'consumer1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:524 Creating replication topology. [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39101 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39101 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39101 is NOT working (expect 68763752-66ae-4b28-8ea0-08263f02bd45 / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39101 is working [32mINFO [0m lib389.replica:replica.py:2211 SUCCESS: joined consumer from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39101 [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39101 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is was created [32mINFO [0m lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39101 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 
[32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 85a85f3d-2283-4bac-8ef4-b7903844fee9 / got description=68763752-66ae-4b28-8ea0-08263f02bd45) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is working | |||
Failed | tickets/ticket48342_test.py::test_ticket4026 | 93.81 | |
topology_m3 = <lib389.topologies.TopologyMain object at 0x7faa5413bdc0> def test_ticket4026(topology_m3): """Write your replication testcase here. To access each DirSrv instance use: topology_m3.ms["master1"], topology_m3.ms["master2"], ..., topology_m3.hub1, ..., topology_m3.consumer1, ... Also, if you need any testcase initialization, please, write additional fixture for that(include finalizer). """ try: topology_m3.ms["master1"].add_s(Entry((PEOPLE_DN, { 'objectclass': "top extensibleObject".split(), 'ou': 'people'}))) except ldap.ALREADY_EXISTS: pass topology_m3.ms["master1"].add_s(Entry(('ou=ranges,' + SUFFIX, { 'objectclass': 'top organizationalunit'.split(), 'ou': 'ranges' }))) for cpt in range(MAX_ACCOUNTS): name = "user%d" % (cpt) topology_m3.ms["master1"].add_s(Entry(("uid=%s,%s" % (name, PEOPLE_DN), { 'objectclass': 'top posixAccount extensibleObject'.split(), 'uid': name, 'cn': name, 'uidNumber': '1', 'gidNumber': '1', 'homeDirectory': '/home/%s' % name }))) # make master3 having more free slots that master2 # so master1 will contact master3 _dna_config(topology_m3.ms["master1"], nextValue=100, maxValue=10) _dna_config(topology_m3.ms["master2"], nextValue=200, maxValue=10) _dna_config(topology_m3.ms["master3"], nextValue=300, maxValue=3000) # Turn on lots of error logging now. 
mod = [(ldap.MOD_REPLACE, 'nsslapd-errorlog-level', b'16384')] # mod = [(ldap.MOD_REPLACE, 'nsslapd-errorlog-level', '1')] topology_m3.ms["master1"].modify_s('cn=config', mod) topology_m3.ms["master2"].modify_s('cn=config', mod) topology_m3.ms["master3"].modify_s('cn=config', mod) # We need to wait for the event in dna.c to fire to start the servers # see dna.c line 899 time.sleep(60) # add on master1 users with description DNA for cpt in range(10): name = "user_with_desc1_%d" % (cpt) topology_m3.ms["master1"].add_s(Entry(("uid=%s,%s" % (name, PEOPLE_DN), { 'objectclass': 'top posixAccount extensibleObject'.split(), 'uid': name, 'cn': name, 'description': '-1', 'uidNumber': '1', 'gidNumber': '1', 'homeDirectory': '/home/%s' % name }))) # give time to negociate master1 <--> master3 time.sleep(10) # add on master1 users with description DNA for cpt in range(11, 20): name = "user_with_desc1_%d" % (cpt) > topology_m3.ms["master1"].add_s(Entry(("uid=%s,%s" % (name, PEOPLE_DN), { 'objectclass': 'top posixAccount extensibleObject'.split(), 'uid': name, 'cn': name, 'description': '-1', 'uidNumber': '1', 'gidNumber': '1', 'homeDirectory': '/home/%s' % name }))) /export/tests/tickets/ticket48342_test.py:118: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/local/lib/python3.8/site-packages/lib389/__init__.py:176: in inner return f(ent.dn, ent.toTupleList(), *args[2:]) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:439: in add_s return self.add_ext_s(dn,modlist,None,None) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:178: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:425: in add_ext_s resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:764: in result3 resp_type, resp_data, resp_msgid, 
decoded_resp_ctrls, retoid, retval = self.result4( /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:774: in result4 ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:340: in _ldap_call reraise(exc_type, exc_value, exc_traceback) /usr/lib64/python3.8/site-packages/ldap/compat.py:46: in reraise raise exc_value _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa5413b9d0> func = <built-in method result4 of LDAP object at 0x7faa5414eb40> args = (15, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E ldap.OPERATIONS_ERROR: {'msgtype': 105, 'msgid': 15, 'result': 1, 'desc': 'Operations error', 'ctrls': [], 'info': 'Allocation of a new value for range cn=dna config,cn=distributed numeric assignment plugin,cn=plugins,cn=config failed! Unable to proceed.'} /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:324: OPERATIONS_ERROR -------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... 
[32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master3 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39003, 'ldap-secureport': 63703, 'server-id': 'master3', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 09da5770-b5c0-42c8-8954-1342d57cc465 / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 90537bf7-1318-4211-964a-09711d600b0d / got description=09da5770-b5c0-42c8-8954-1342d57cc465) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master3 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 5d3c7de0-4e63-4bff-b00b-b8789d895d07 / got description=90537bf7-1318-4211-964a-09711d600b0d) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect bac065d8-f139-481b-9d32-e10a275e63db / got description=5d3c7de0-4e63-4bff-b00b-b8789d895d07) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... 
[32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master3 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master3 ... [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is was created [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master3 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master3 to master2 ... 
[32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket48342_test:ticket48342_test.py:19 Add dna plugin config entry...ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 [32mINFO [0m tests.tickets.ticket48342_test:ticket48342_test.py:37 Enable the DNA plugin... [32mINFO [0m tests.tickets.ticket48342_test:ticket48342_test.py:44 Restarting the server... [32mINFO [0m tests.tickets.ticket48342_test:ticket48342_test.py:19 Add dna plugin config entry...ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m tests.tickets.ticket48342_test:ticket48342_test.py:37 Enable the DNA plugin... [32mINFO [0m tests.tickets.ticket48342_test:ticket48342_test.py:44 Restarting the server... [32mINFO [0m tests.tickets.ticket48342_test:ticket48342_test.py:19 Add dna plugin config entry...ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 [32mINFO [0m tests.tickets.ticket48342_test:ticket48342_test.py:37 Enable the DNA plugin... [32mINFO [0m tests.tickets.ticket48342_test:ticket48342_test.py:44 Restarting the server... | |||
Failed | tickets/ticket48637_test.py::test_ticket48637 | 4.93 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7faa473c5d90> def test_ticket48637(topology_st): """Test for entry cache corruption This requires automember and managed entry plugins to be configured. Then remove the group that automember would use to trigger a failure when adding a new entry. Automember fails, and then managed entry also fails. Make sure a base search on the entry returns error 32 """ if DEBUGGING: # Add debugging steps(if any)... pass # # Add our setup entries # try: topology_st.standalone.add_s(Entry((PEOPLE_OU, { 'objectclass': 'top organizationalunit'.split(), 'ou': 'people'}))) except ldap.ALREADY_EXISTS: pass except ldap.LDAPError as e: log.fatal('Failed to add people ou: ' + str(e)) assert False try: topology_st.standalone.add_s(Entry((GROUP_OU, { 'objectclass': 'top organizationalunit'.split(), 'ou': 'groups'}))) except ldap.ALREADY_EXISTS: pass except ldap.LDAPError as e: log.fatal('Failed to add groups ou: ' + str(e)) assert False try: topology_st.standalone.add_s(Entry((MEP_OU, { 'objectclass': 'top extensibleObject'.split(), 'ou': 'mep'}))) except ldap.LDAPError as e: log.fatal('Failed to add MEP ou: ' + str(e)) assert False try: topology_st.standalone.add_s(Entry((MEP_TEMPLATE, { 'objectclass': 'top mepTemplateEntry'.split(), 'cn': 'mep template', 'mepRDNAttr': 'cn', 'mepStaticAttr': 'objectclass: groupofuniquenames', 'mepMappedAttr': 'cn: $uid'}))) except ldap.LDAPError as e: log.fatal('Failed to add MEP ou: ' + str(e)) assert False # # Configure automember # try: topology_st.standalone.add_s(Entry((AUTO_DN, { 'cn': 'All Users', 'objectclass': ['top', 'autoMemberDefinition'], 'autoMemberScope': 'dc=example,dc=com', 'autoMemberFilter': 'objectclass=person', 'autoMemberDefaultGroup': GROUP_DN, 'autoMemberGroupingAttr': 'uniquemember:dn'}))) except ldap.LDAPError as e: log.fatal('Failed to configure automember plugin : ' + str(e)) assert False # # Configure managed entry plugin # try: 
topology_st.standalone.add_s(Entry((MEP_DN, { 'cn': 'MEP Definition', 'objectclass': ['top', 'extensibleObject'], 'originScope': 'ou=people,dc=example,dc=com', 'originFilter': 'objectclass=person', 'managedBase': 'ou=groups,dc=example,dc=com', 'managedTemplate': MEP_TEMPLATE}))) except ldap.LDAPError as e: log.fatal('Failed to configure managed entry plugin : ' + str(e)) assert False # # Restart DS # topology_st.standalone.restart(timeout=30) # # Add entry that should fail since the automember group does not exist # try: topology_st.standalone.add_s(Entry((USER_DN, { 'uid': 'test', 'objectclass': ['top', 'person', 'extensibleObject'], 'sn': 'test', 'cn': 'test'}))) except ldap.LDAPError as e: pass # # Search for the entry - it should not be returned # try: entry = topology_st.standalone.search_s(USER_DN, ldap.SCOPE_SUBTREE, 'objectclass=*') if entry: log.fatal('Entry was incorrectly returned') > assert False E assert False /export/tests/tickets/ticket48637_test.py:139: AssertionError -------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [31mCRITICAL[0m tests.tickets.ticket48637_test:ticket48637_test.py:138 Entry was incorrectly returned | |||
Failed | tickets/ticket48784_test.py::test_ticket48784 | 40.56 | |
Fixture "add_entry" called directly. Fixtures are not meant to be called directly, but are created automatically when test functions request them as parameters. See https://docs.pytest.org/en/latest/fixture.html for more information about fixtures, and https://docs.pytest.org/en/latest/deprecations.html#calling-fixtures-directly about how to update your code. -------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 25005d4b-31ed-4a0c-bf8a-d4d9b5ffeb0c / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 0f8ad908-764f-427a-b68f-d3ec2de0f65f / got description=25005d4b-31ed-4a0c-bf8a-d4d9b5ffeb0c) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... 
[32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket48784_test:ticket48784_test.py:90 Ticket 48784 - Allow usage of OpenLDAP libraries that don't use NSS for crypto [32mINFO [0m tests.tickets.ticket48784_test:ticket48784_test.py:50 ######################### Configure SSL/TLS agreements ###################### [32mINFO [0m tests.tickets.ticket48784_test:ticket48784_test.py:51 ######################## master1 <-- startTLS -> master2 ##################### [32mINFO [0m tests.tickets.ticket48784_test:ticket48784_test.py:53 ##### Update the agreement of master1 [32mINFO [0m tests.tickets.ticket48784_test:ticket48784_test.py:58 ##### Update the agreement of master2 [32mINFO [0m tests.tickets.ticket48784_test:ticket48784_test.py:68 ######################### Configure SSL/TLS agreements Done ###################### | |||
Failed | tickets/ticket48798_test.py::test_ticket48798 | 17.18 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7faa471f5190> def test_ticket48798(topology_st): """ Test DH param sizes offered by DS. """ topology_st.standalone.enable_tls() # Confirm that we have a connection, and that it has DH # Open a socket to the port. # Check the security settings. > size = check_socket_dh_param_size(topology_st.standalone.host, topology_st.standalone.sslport) /export/tests/tickets/ticket48798_test.py:46: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /export/tests/tickets/ticket48798_test.py:23: in check_socket_dh_param_size output = check_output(cmd, shell=True) /usr/lib64/python3.8/subprocess.py:411: in check_output return run(*popenargs, stdout=PIPE, timeout=timeout, check=True, _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ input = None, capture_output = False, timeout = None, check = True popenargs = ('echo quit | openssl s_client -connect ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63601 -msg -cipher DH | grep -A 1 ServerKeyExchange',) kwargs = {'shell': True, 'stdout': -1} process = <subprocess.Popen object at 0x7faa471e8eb0>, stdout = b'' stderr = None, retcode = 1 def run(*popenargs, input=None, capture_output=False, timeout=None, check=False, **kwargs): """Run command with arguments and return a CompletedProcess instance. The returned instance will have attributes args, returncode, stdout and stderr. By default, stdout and stderr are not captured, and those attributes will be None. Pass stdout=PIPE and/or stderr=PIPE in order to capture them. If check is True and the exit code was non-zero, it raises a CalledProcessError. The CalledProcessError object will have the return code in the returncode attribute, and output & stderr attributes if those streams were captured. If timeout is given, and the process takes too long, a TimeoutExpired exception will be raised. 
There is an optional argument "input", allowing you to pass bytes or a string to the subprocess's stdin. If you use this argument you may not also use the Popen constructor's "stdin" argument, as it will be used internally. By default, all communication is in bytes, and therefore any "input" should be bytes, and the stdout and stderr will be bytes. If in text mode, any "input" should be a string, and stdout and stderr will be strings decoded according to locale encoding, or by "encoding" if set. Text mode is triggered by setting any of text, encoding, errors or universal_newlines. The other arguments are the same as for the Popen constructor. """ if input is not None: if kwargs.get('stdin') is not None: raise ValueError('stdin and input arguments may not both be used.') kwargs['stdin'] = PIPE if capture_output: if kwargs.get('stdout') is not None or kwargs.get('stderr') is not None: raise ValueError('stdout and stderr arguments may not be used ' 'with capture_output.') kwargs['stdout'] = PIPE kwargs['stderr'] = PIPE with Popen(*popenargs, **kwargs) as process: try: stdout, stderr = process.communicate(input, timeout=timeout) except TimeoutExpired as exc: process.kill() if _mswindows: # Windows accumulates the output in a single blocking # read() call run on child threads, with the timeout # being done in a join() on those threads. communicate() # _after_ kill() is required to collect that and add it # to the exception. exc.stdout, exc.stderr = process.communicate() else: # POSIX _communicate already populated the output so # far into the TimeoutExpired exception. process.wait() raise except: # Including KeyboardInterrupt, communicate handled that. process.kill() # We don't call process.wait() as .__exit__ does that for us. 
raise retcode = process.poll() if check and retcode: > raise CalledProcessError(retcode, process.args, output=stdout, stderr=stderr) E subprocess.CalledProcessError: Command 'echo quit | openssl s_client -connect ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63601 -msg -cipher DH | grep -A 1 ServerKeyExchange' returned non-zero exit status 1. /usr/lib64/python3.8/subprocess.py:512: CalledProcessError -------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. ------------------------------Captured stderr call------------------------------ depth=1 C = AU, ST = Queensland, L = 389ds, O = testing, CN = ssca.389ds.example.com verify return:1 depth=0 C = AU, ST = Queensland, L = 389ds, O = testing, GN = 592d3059-a6cc-47e3-a344-b608dcb269ad, CN = ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com verify return:1 DONE | |||
Failed | tickets/ticket48808_test.py::test_ticket48808 | 8.24 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7faa4770cbb0> create_user = None def test_ticket48808(topology_st, create_user): log.info('Run multiple paging controls on a single connection') users_num = 100 page_size = 30 users_list = add_users(topology_st, users_num) search_flt = r'(uid=test*)' searchreq_attrlist = ['dn', 'sn'] log.info('Set user bind') topology_st.standalone.simple_bind_s(TEST_USER_DN, TEST_USER_PWD) log.info('Create simple paged results control instance') req_ctrl = SimplePagedResultsControl(True, size=page_size, cookie='') controls = [req_ctrl] for ii in range(3): log.info('Iteration %d' % ii) msgid = topology_st.standalone.search_ext(DEFAULT_SUFFIX, ldap.SCOPE_SUBTREE, search_flt, searchreq_attrlist, serverctrls=controls) rtype, rdata, rmsgid, rctrls = topology_st.standalone.result3(msgid) pctrls = [ c for c in rctrls if c.controlType == SimplePagedResultsControl.controlType ] req_ctrl.cookie = pctrls[0].cookie msgid = topology_st.standalone.search_ext(DEFAULT_SUFFIX, ldap.SCOPE_SUBTREE, search_flt, searchreq_attrlist, serverctrls=controls) log.info('Set Directory Manager bind back') topology_st.standalone.simple_bind_s(DN_DM, PASSWORD) del_users(topology_st, users_list) log.info('Abandon the search') users_num = 10 page_size = 0 users_list = add_users(topology_st, users_num) search_flt = r'(uid=test*)' searchreq_attrlist = ['dn', 'sn'] log.info('Set user bind') topology_st.standalone.simple_bind_s(TEST_USER_DN, TEST_USER_PWD) log.info('Create simple paged results control instance') req_ctrl = SimplePagedResultsControl(True, size=page_size, cookie='') controls = [req_ctrl] msgid = topology_st.standalone.search_ext(DEFAULT_SUFFIX, ldap.SCOPE_SUBTREE, search_flt, searchreq_attrlist, serverctrls=controls) rtype, rdata, rmsgid, rctrls = topology_st.standalone.result3(msgid) pctrls = [ c for c in rctrls if c.controlType == SimplePagedResultsControl.controlType ] assert not pctrls[0].cookie log.info('Set Directory Manager bind back') 
topology_st.standalone.simple_bind_s(DN_DM, PASSWORD) del_users(topology_st, users_list) log.info("Search should fail with 'nsPagedSizeLimit = 5'" "and 'nsslapd-pagedsizelimit = 15' with 10 users") conf_attr = b'15' user_attr = b'5' expected_rs = ldap.SIZELIMIT_EXCEEDED users_num = 10 page_size = 10 users_list = add_users(topology_st, users_num) search_flt = r'(uid=test*)' searchreq_attrlist = ['dn', 'sn'] conf_attr_bck = change_conf_attr(topology_st, DN_CONFIG, 'nsslapd-pagedsizelimit', conf_attr) user_attr_bck = change_conf_attr(topology_st, TEST_USER_DN, 'nsPagedSizeLimit', user_attr) log.info('Set user bind') topology_st.standalone.simple_bind_s(TEST_USER_DN, TEST_USER_PWD) log.info('Create simple paged results control instance') req_ctrl = SimplePagedResultsControl(True, size=page_size, cookie='') controls = [req_ctrl] log.info('Expect to fail with SIZELIMIT_EXCEEDED') with pytest.raises(expected_rs): > all_results = paged_search(topology_st, controls, search_flt, searchreq_attrlist) E Failed: DID NOT RAISE <class 'ldap.SIZELIMIT_EXCEEDED'> /export/tests/tickets/ticket48808_test.py:252: Failed -------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. 
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket48808_test:ticket48808_test.py:159 Run multiple paging controls on a single connection [32mINFO [0m tests.tickets.ticket48808_test:ticket48808_test.py:48 Adding 100 users [32mINFO [0m tests.tickets.ticket48808_test:ticket48808_test.py:166 Set user bind [32mINFO [0m tests.tickets.ticket48808_test:ticket48808_test.py:169 Create simple paged results control instance [32mINFO [0m tests.tickets.ticket48808_test:ticket48808_test.py:174 Iteration 0 [32mINFO [0m tests.tickets.ticket48808_test:ticket48808_test.py:174 Iteration 1 [32mINFO [0m tests.tickets.ticket48808_test:ticket48808_test.py:174 Iteration 2 [32mINFO [0m tests.tickets.ticket48808_test:ticket48808_test.py:193 Set Directory Manager bind back [32mINFO [0m tests.tickets.ticket48808_test:ticket48808_test.py:75 Deleting 100 users [32mINFO [0m tests.tickets.ticket48808_test:ticket48808_test.py:197 Abandon the search [32mINFO [0m tests.tickets.ticket48808_test:ticket48808_test.py:48 Adding 10 users [32mINFO [0m tests.tickets.ticket48808_test:ticket48808_test.py:204 Set user bind [32mINFO [0m tests.tickets.ticket48808_test:ticket48808_test.py:207 Create simple paged results control instance [32mINFO [0m tests.tickets.ticket48808_test:ticket48808_test.py:224 Set Directory Manager bind back [32mINFO [0m tests.tickets.ticket48808_test:ticket48808_test.py:75 Deleting 10 users [32mINFO [0m tests.tickets.ticket48808_test:ticket48808_test.py:228 Search should fail with 'nsPagedSizeLimit = 5'and 'nsslapd-pagedsizelimit = 15' with 10 users [32mINFO [0m tests.tickets.ticket48808_test:ticket48808_test.py:48 Adding 10 users [32mINFO [0m tests.tickets.ticket48808_test:ticket48808_test.py:95 Set nsslapd-pagedsizelimit to b'15'. Previous value - [b'0']. Modified suffix - cn=config. [32mINFO [0m tests.tickets.ticket48808_test:ticket48808_test.py:95 Set nsPagedSizeLimit to b'5'. Previous value - None. 
Modified suffix - uid=simplepaged_test,dc=example,dc=com. [32mINFO [0m tests.tickets.ticket48808_test:ticket48808_test.py:243 Set user bind [32mINFO [0m tests.tickets.ticket48808_test:ticket48808_test.py:246 Create simple paged results control instance [32mINFO [0m tests.tickets.ticket48808_test:ticket48808_test.py:250 Expect to fail with SIZELIMIT_EXCEEDED [32mINFO [0m tests.tickets.ticket48808_test:ticket48808_test.py:130 Getting page 0 | |||
Failed | tickets/ticket48896_test.py::test_ticket48896 | 0.10 | |
server = <lib389.DirSrv object at 0x7faa542949d0>, curpw = 'password' newpw = 'Abcd012+', expstr = 'be ok', rc = 0 def replace_pw(server, curpw, newpw, expstr, rc): log.info('Binding as {%s, %s}' % (TESTDN, curpw)) server.simple_bind_s(TESTDN, curpw) hit = 0 log.info('Replacing password: %s -> %s, which should %s' % (curpw, newpw, expstr)) try: > server.modify_s(TESTDN, [(ldap.MOD_REPLACE, 'userPassword', ensure_bytes(newpw))]) /export/tests/tickets/ticket48896_test.py:53: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = ('uid=buser123,dc=example,dc=com', [(2, 'userPassword', b'Abcd012+')]) kwargs = {} c_stack = [FrameInfo(frame=<frame at 0x55ebeb301250, file '/usr/local/lib/python3.8/site-packages/lib389/__init__.py', line 180,...mbda>', code_context=[' self._inner_hookexec = lambda hook, methods, kwargs: hook.multicall(\n'], index=0), ...] frame = FrameInfo(frame=<frame at 0x55ebeb867110, file '/export/tests/tickets/ticket48896_test.py', line 57, code replace_pw>,...code_context=[" server.modify_s(TESTDN, [(ldap.MOD_REPLACE, 'userPassword', ensure_bytes(newpw))])\n"], index=0) def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. 
elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa542949d0> dn = 'uid=buser123,dc=example,dc=com' modlist = [(2, 'userPassword', b'Abcd012+')] def modify_s(self,dn,modlist): > return self.modify_ext_s(dn,modlist,None,None) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:640: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = ('uid=buser123,dc=example,dc=com', [(2, 'userPassword', b'Abcd012+')], None, None) kwargs = {} def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. 
This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa542949d0> dn = 'uid=buser123,dc=example,dc=com' modlist = [(2, 'userPassword', b'Abcd012+')], serverctrls = None clientctrls = None def modify_ext_s(self,dn,modlist,serverctrls=None,clientctrls=None): msgid = self.modify_ext(dn,modlist,serverctrls,clientctrls) > resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:613: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = (8,), kwargs = {'all': 1, 'timeout': -1} def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 
'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa542949d0>, msgid = 8, all = 1 timeout = -1, resp_ctrl_classes = None def result3(self,msgid=ldap.RES_ANY,all=1,timeout=None,resp_ctrl_classes=None): > resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4( msgid,all,timeout, 
add_ctrls=0,add_intermediates=0,add_extop=0, resp_ctrl_classes=resp_ctrl_classes ) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:764: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = (8, 1, -1) kwargs = {'add_ctrls': 0, 'add_extop': 0, 'add_intermediates': 0, 'resp_ctrl_classes': None} def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: _ _ 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa542949d0>, msgid = 8, all = 1 timeout = -1, add_ctrls = 0, add_intermediates = 0, add_extop = 0 resp_ctrl_classes = None def result4(self,msgid=ldap.RES_ANY,all=1,timeout=None,add_ctrls=0,add_intermediates=0,add_extop=0,resp_ctrl_classes=None): if timeout is None: timeout = self.timeout > ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:774: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = (<built-in method result4 of LDAP object at 0x7faa5408a870>, 8, 1, -1, 0, 0, ...) kwargs = {} def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. 
elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa542949d0> func = <built-in method result4 of LDAP object at 0x7faa5408a870> args = (8, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: result = func(*args,**kwargs) if __debug__ and self._trace_level>=2: if func.__name__!="unbind_ext": 
diagnostic_message_success = self._l.get_option(ldap.OPT_DIAGNOSTIC_MESSAGE) finally: self._ldap_object_lock.release() except LDAPError as e: exc_type,exc_value,exc_traceback = sys.exc_info() try: if 'info' not in e.args[0] and 'errno' in e.args[0]: e.args[0]['info'] = strerror(e.args[0]['errno']) except IndexError: pass if __debug__ and self._trace_level>=2: self._trace_file.write('=> LDAPError - %s: %s\n' % (e.__class__.__name__,str(e))) try: > reraise(exc_type, exc_value, exc_traceback) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:340: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ exc_type = <class 'ldap.INSUFFICIENT_ACCESS'> exc_value = INSUFFICIENT_ACCESS({'msgtype': 103, 'msgid': 8, 'result': 50, 'desc': 'Insufficient access', 'ctrls': [], 'info': "Insufficient 'write' privilege to the 'userPassword' attribute of entry 'uid=buser123,dc=example,dc=com'.\n"}) exc_traceback = <traceback object at 0x7faa5ad29600> def reraise(exc_type, exc_value, exc_traceback): """Re-raise an exception given information from sys.exc_info() Note that unlike six.reraise, this does not support replacing the traceback. All arguments must come from a single sys.exc_info() call. """ # In Python 3, all exception info is contained in one object. 
> raise exc_value /usr/lib64/python3.8/site-packages/ldap/compat.py:46: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa542949d0> func = <built-in method result4 of LDAP object at 0x7faa5408a870> args = (8, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E ldap.INSUFFICIENT_ACCESS: {'msgtype': 103, 'msgid': 8, 'result': 50, 'desc': 'Insufficient access', 'ctrls': [], 'info': "Insufficient 'write' privilege to the 'userPassword' attribute of entry 'uid=buser123,dc=example,dc=com'.\n"} /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:324: INSUFFICIENT_ACCESS During handling of the above exception, another exception occurred: topology_st = <lib389.topologies.TopologyMain object at 0x7faa54294520> def test_ticket48896(topology_st): """ """ log.info('Testing Ticket 48896 - Default Setting for passwordMinTokenLength does not work') log.info("Setting global password policy with password syntax.") topology_st.standalone.simple_bind_s(DN_DM, PASSWORD) topology_st.standalone.modify_s(CONFIG_DN, [(ldap.MOD_REPLACE, 'passwordCheckSyntax', b'on'), (ldap.MOD_REPLACE, 'nsslapd-pwpolicy-local', b'on')]) config = topology_st.standalone.search_s(CONFIG_DN, ldap.SCOPE_BASE, 'cn=*') mintokenlen = config[0].getValue('passwordMinTokenLength') history = config[0].getValue('passwordInHistory') log.info('Default 
passwordMinTokenLength == %s' % mintokenlen) log.info('Default passwordInHistory == %s' % history) log.info('Adding a user.') curpw = 'password' topology_st.standalone.add_s(Entry((TESTDN, {'objectclass': "top person organizationalPerson inetOrgPerson".split(), 'cn': 'test user', 'sn': 'user', 'userPassword': curpw}))) newpw = 'Abcd012+' exp = 'be ok' rc = 0 > replace_pw(topology_st.standalone, curpw, newpw, exp, rc) /export/tests/tickets/ticket48896_test.py:94: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ server = <lib389.DirSrv object at 0x7faa542949d0>, curpw = 'password' newpw = 'Abcd012+', expstr = 'be ok', rc = 0 def replace_pw(server, curpw, newpw, expstr, rc): log.info('Binding as {%s, %s}' % (TESTDN, curpw)) server.simple_bind_s(TESTDN, curpw) hit = 0 log.info('Replacing password: %s -> %s, which should %s' % (curpw, newpw, expstr)) try: server.modify_s(TESTDN, [(ldap.MOD_REPLACE, 'userPassword', ensure_bytes(newpw))]) except Exception as e: log.info("Exception (expected): %s" % type(e).__name__) hit = 1 > assert isinstance(e, rc) E TypeError: isinstance() arg 2 must be a type or tuple of types /export/tests/tickets/ticket48896_test.py:57: TypeError -------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket48896_test:ticket48896_test.py:69 Testing Ticket 48896 - Default Setting for passwordMinTokenLength does not work [32mINFO [0m tests.tickets.ticket48896_test:ticket48896_test.py:71 Setting global password policy with password syntax. 
[32mINFO [0m tests.tickets.ticket48896_test:ticket48896_test.py:80 Default passwordMinTokenLength == b'3' [32mINFO [0m tests.tickets.ticket48896_test:ticket48896_test.py:81 Default passwordInHistory == b'6' [32mINFO [0m tests.tickets.ticket48896_test:ticket48896_test.py:83 Adding a user. [32mINFO [0m tests.tickets.ticket48896_test:ticket48896_test.py:47 Binding as {uid=buser123,dc=example,dc=com, password} [32mINFO [0m tests.tickets.ticket48896_test:ticket48896_test.py:51 Replacing password: password -> Abcd012+, which should be ok [32mINFO [0m tests.tickets.ticket48896_test:ticket48896_test.py:55 Exception (expected): INSUFFICIENT_ACCESS | |||
Failed | tickets/ticket48916_test.py::test_ticket48916 | 48.58 | |
topology_m2 = <lib389.topologies.TopologyMain object at 0x7faa473459a0> def test_ticket48916(topology_m2): """ https://bugzilla.redhat.com/show_bug.cgi?id=1353629 This is an issue with ID exhaustion in DNA causing a crash. To access each DirSrv instance use: topology_m2.ms["master1"], topology_m2.ms["master2"], ..., topology_m2.hub1, ..., topology_m2.consumer1,... """ if DEBUGGING: # Add debugging steps(if any)... pass # Enable the plugin on both servers dna_m1 = topology_m2.ms["master1"].plugins.get('Distributed Numeric Assignment Plugin') dna_m2 = topology_m2.ms["master2"].plugins.get('Distributed Numeric Assignment Plugin') # Configure it # Create the container for the ranges to go into. topology_m2.ms["master1"].add_s(Entry( ('ou=Ranges,%s' % DEFAULT_SUFFIX, { 'objectClass': 'top organizationalUnit'.split(' '), 'ou': 'Ranges', }) )) # Create the dnaAdmin? # For now we just pinch the dn from the dna_m* types, and add the relevant child config # but in the future, this could be a better plugin template type from lib389 config_dn = dna_m1.dn topology_m2.ms["master1"].add_s(Entry( ('cn=uids,%s' % config_dn, { 'objectClass': 'top dnaPluginConfig'.split(' '), 'cn': 'uids', 'dnatype': 'uidNumber gidNumber'.split(' '), 'dnafilter': '(objectclass=posixAccount)', 'dnascope': '%s' % DEFAULT_SUFFIX, 'dnaNextValue': '1', 'dnaMaxValue': '50', 'dnasharedcfgdn': 'ou=Ranges,%s' % DEFAULT_SUFFIX, 'dnaThreshold': '0', 'dnaRangeRequestTimeout': '60', 'dnaMagicRegen': '-1', 'dnaRemoteBindDN': 'uid=dnaAdmin,ou=People,%s' % DEFAULT_SUFFIX, 'dnaRemoteBindCred': 'secret123', 'dnaNextRange': '80-90' }) )) topology_m2.ms["master2"].add_s(Entry( ('cn=uids,%s' % config_dn, { 'objectClass': 'top dnaPluginConfig'.split(' '), 'cn': 'uids', 'dnatype': 'uidNumber gidNumber'.split(' '), 'dnafilter': '(objectclass=posixAccount)', 'dnascope': '%s' % DEFAULT_SUFFIX, 'dnaNextValue': '61', 'dnaMaxValue': '70', 'dnasharedcfgdn': 'ou=Ranges,%s' % DEFAULT_SUFFIX, 'dnaThreshold': '2', 
'dnaRangeRequestTimeout': '60', 'dnaMagicRegen': '-1', 'dnaRemoteBindDN': 'uid=dnaAdmin,ou=People,%s' % DEFAULT_SUFFIX, 'dnaRemoteBindCred': 'secret123', }) )) # Enable the plugins dna_m1.enable() dna_m2.enable() # Restart the instances topology_m2.ms["master1"].restart(60) topology_m2.ms["master2"].restart(60) # Wait for a replication ..... time.sleep(40) # Allocate the 10 members to exhaust for i in range(1, 11): _create_user(topology_m2.ms["master2"], i) # Allocate the 11th > _create_user(topology_m2.ms["master2"], 11) /export/tests/tickets/ticket48916_test.py:126: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /export/tests/tickets/ticket48916_test.py:21: in _create_user inst.add_s(Entry( /usr/local/lib/python3.8/site-packages/lib389/__init__.py:176: in inner return f(ent.dn, ent.toTupleList(), *args[2:]) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:439: in add_s return self.add_ext_s(dn,modlist,None,None) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:178: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:425: in add_ext_s resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:764: in result3 resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4( /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:774: in result4 ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:340: in _ldap_call reraise(exc_type, exc_value, exc_traceback) 
/usr/lib64/python3.8/site-packages/ldap/compat.py:46: in reraise raise exc_value _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa47363cd0> func = <built-in method result4 of LDAP object at 0x7faa4734c8d0> args = (13, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E ldap.OPERATIONS_ERROR: {'msgtype': 105, 'msgid': 13, 'result': 1, 'desc': 'Operations error', 'ctrls': [], 'info': 'Allocation of a new value for range cn=uids,cn=distributed numeric assignment plugin,cn=plugins,cn=config failed! Unable to proceed.'} /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:324: OPERATIONS_ERROR -------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... 
[32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 005f1165-1154-47ca-b558-3716671b90d3 / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect ea677653-0e04-4d79-b7b5-51eba6647bda / got description=005f1165-1154-47ca-b558-3716671b90d3) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working 
[32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists | |||
Failed | tickets/ticket48956_test.py::test_ticket48956 | 7.05 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7faa473dbb80> def test_ticket48956(topology_st): """Write your testcase here... Also, if you need any testcase initialization, please, write additional fixture for that(include finalizer). """ topology_st.standalone.modify_s(ACCT_POLICY_PLUGIN_DN, [(ldap.MOD_REPLACE, 'nsslapd-pluginarg0', ensure_bytes(ACCT_POLICY_CONFIG_DN))]) topology_st.standalone.modify_s(ACCT_POLICY_CONFIG_DN, [(ldap.MOD_REPLACE, 'alwaysrecordlogin', b'yes'), (ldap.MOD_REPLACE, 'stateattrname', b'lastLoginTime'), (ldap.MOD_REPLACE, 'altstateattrname', b'createTimestamp'), (ldap.MOD_REPLACE, 'specattrname', b'acctPolicySubentry'), (ldap.MOD_REPLACE, 'limitattrname', b'accountInactivityLimit')]) # Enable the plugins topology_st.standalone.plugins.enable(name=PLUGIN_ACCT_POLICY) topology_st.standalone.restart(timeout=10) # Check inactivity on standard suffix (short) > _check_inactivity(topology_st, SUFFIX) /export/tests/tickets/ticket48956_test.py:107: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /export/tests/tickets/ticket48956_test.py:78: in _check_inactivity assert (_check_status(topology_st, TEST_USER_DN, b'- activated')) /export/tests/tickets/ticket48956_test.py:39: in _check_status output = subprocess.check_output([nsaccountstatus, '-Z', topology_st.standalone.serverid, /usr/lib64/python3.8/subprocess.py:411: in check_output return run(*popenargs, stdout=PIPE, timeout=timeout, check=True, /usr/lib64/python3.8/subprocess.py:489: in run with Popen(*popenargs, **kwargs) as process: /usr/lib64/python3.8/subprocess.py:854: in __init__ self._execute_child(args, executable, preexec_fn, close_fds, _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <subprocess.Popen object at 0x7faa44595490> args = ['/usr/sbin/ns-accountstatus.pl', '-Z', 'standalone1', '-D', 'cn=Directory Manager', '-w', ...] 
executable = b'/usr/sbin/ns-accountstatus.pl', preexec_fn = None close_fds = True, pass_fds = (), cwd = None, env = None, startupinfo = None creationflags = 0, shell = False, p2cread = -1, p2cwrite = -1, c2pread = 43 c2pwrite = 44, errread = -1, errwrite = -1, restore_signals = True start_new_session = False def _execute_child(self, args, executable, preexec_fn, close_fds, pass_fds, cwd, env, startupinfo, creationflags, shell, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite, restore_signals, start_new_session): """Execute program (POSIX version)""" if isinstance(args, (str, bytes)): args = [args] elif isinstance(args, os.PathLike): if shell: raise TypeError('path-like args is not allowed when ' 'shell is true') args = [args] else: args = list(args) if shell: # On Android the default shell is at '/system/bin/sh'. unix_shell = ('/system/bin/sh' if hasattr(sys, 'getandroidapilevel') else '/bin/sh') args = [unix_shell, "-c"] + args if executable: args[0] = executable if executable is None: executable = args[0] sys.audit("subprocess.Popen", executable, args, cwd, env) if (_USE_POSIX_SPAWN and os.path.dirname(executable) and preexec_fn is None and not close_fds and not pass_fds and cwd is None and (p2cread == -1 or p2cread > 2) and (c2pwrite == -1 or c2pwrite > 2) and (errwrite == -1 or errwrite > 2) and not start_new_session): self._posix_spawn(args, executable, env, restore_signals, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite) return orig_executable = executable # For transferring possible exec failure from child to parent. # Data format: "exception name:hex errno:description" # Pickle is not used; it is complex and involves memory allocation. errpipe_read, errpipe_write = os.pipe() # errpipe_write must not be in the standard io 0, 1, or 2 fd range. 
low_fds_to_close = [] while errpipe_write < 3: low_fds_to_close.append(errpipe_write) errpipe_write = os.dup(errpipe_write) for low_fd in low_fds_to_close: os.close(low_fd) try: try: # We must avoid complex work that could involve # malloc or free in the child process to avoid # potential deadlocks, thus we do all this here. # and pass it to fork_exec() if env is not None: env_list = [] for k, v in env.items(): k = os.fsencode(k) if b'=' in k: raise ValueError("illegal environment variable name") env_list.append(k + b'=' + os.fsencode(v)) else: env_list = None # Use execv instead of execve. executable = os.fsencode(executable) if os.path.dirname(executable): executable_list = (executable,) else: # This matches the behavior of os._execvpe(). executable_list = tuple( os.path.join(os.fsencode(dir), executable) for dir in os.get_exec_path(env)) fds_to_keep = set(pass_fds) fds_to_keep.add(errpipe_write) self.pid = _posixsubprocess.fork_exec( args, executable_list, close_fds, tuple(sorted(map(int, fds_to_keep))), cwd, env_list, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite, errpipe_read, errpipe_write, restore_signals, start_new_session, preexec_fn) self._child_created = True finally: # be sure the FD is closed no matter what os.close(errpipe_write) self._close_pipe_fds(p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite) # Wait for exec to fail or succeed; possibly raising an # exception (limited in size) errpipe_data = bytearray() while True: part = os.read(errpipe_read, 50000) errpipe_data += part if not part or len(errpipe_data) > 50000: break finally: # be sure the FD is closed no matter what os.close(errpipe_read) if errpipe_data: try: pid, sts = os.waitpid(self.pid, 0) if pid == self.pid: self._handle_exitstatus(sts) else: self.returncode = sys.maxsize except ChildProcessError: pass try: exception_name, hex_errno, err_msg = ( errpipe_data.split(b':', 2)) # The encoding here should match the encoding # written in by the subprocess implementations # 
like _posixsubprocess err_msg = err_msg.decode() except ValueError: exception_name = b'SubprocessError' hex_errno = b'0' err_msg = 'Bad exception data from child: {!r}'.format( bytes(errpipe_data)) child_exception_type = getattr( builtins, exception_name.decode('ascii'), SubprocessError) if issubclass(child_exception_type, OSError) and hex_errno: errno_num = int(hex_errno, 16) child_exec_never_called = (err_msg == "noexec") if child_exec_never_called: err_msg = "" # The error must be from chdir(cwd). err_filename = cwd else: err_filename = orig_executable if errno_num != 0: err_msg = os.strerror(errno_num) > raise child_exception_type(errno_num, err_msg, err_filename) E FileNotFoundError: [Errno 2] No such file or directory: '/usr/sbin/ns-accountstatus.pl' /usr/lib64/python3.8/subprocess.py:1702: FileNotFoundError -------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket48956_test:ticket48956_test.py:54 ######################### Adding Account Policy entry: cn=Account Inactivation Policy,dc=example,dc=com ###################### [32mINFO [0m tests.tickets.ticket48956_test:ticket48956_test.py:61 ######################### Adding Test User entry: uid=ticket48956user,dc=example,dc=com ###################### | |||
Failed | tickets/ticket48961_test.py::test_ticket48961_storagescheme | 0.02 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7faa4736c730> def test_ticket48961_storagescheme(topology_st): """ Test deleting of the storage scheme. """ default = topology_st.standalone.config.get_attr_val('passwordStorageScheme') # Change it topology_st.standalone.config.set('passwordStorageScheme', 'CLEAR') # Now delete it > topology_st.standalone.config.remove('passwordStorageScheme', None) /export/tests/tickets/ticket48961_test.py:28: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/local/lib/python3.8/site-packages/lib389/_mapped_object.py:316: in remove self.set(key, value, action=ldap.MOD_DELETE) /usr/local/lib/python3.8/site-packages/lib389/_mapped_object.py:446: in set return self._instance.modify_ext_s(self._dn, [(action, key, value)], /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:613: in modify_ext_s resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:764: in result3 resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4( /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:774: in result4 ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:340: in _ldap_call reraise(exc_type, exc_value, exc_traceback) /usr/lib64/python3.8/site-packages/ldap/compat.py:46: in reraise raise exc_value _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv 
object at 0x7faa4736c6d0> func = <built-in method result4 of LDAP object at 0x7faa47378780> args = (5, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E ldap.OPERATIONS_ERROR: {'msgtype': 103, 'msgid': 5, 'result': 1, 'desc': 'Operations error', 'ctrls': [], 'info': 'passwordStorageScheme: deleting the value is not allowed.'} /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:324: OPERATIONS_ERROR -------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Failed | tickets/ticket48961_test.py::test_ticket48961_deleteall | 0.00 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7faa4736c730> def test_ticket48961_deleteall(topology_st): """ Test that we can delete all valid attrs, and that a few are rejected. """ attr_to_test = { 'nsslapd-listenhost': 'localhost', 'nsslapd-securelistenhost': 'localhost', 'nsslapd-allowed-sasl-mechanisms': 'GSSAPI', 'nsslapd-svrtab': 'Some bogus data', # This one could reset? } attr_to_fail = { # These are the values that should always be dn dse.ldif too 'nsslapd-localuser': 'dirsrv', 'nsslapd-defaultnamingcontext': 'dc=example,dc=com', # Can't delete 'nsslapd-accesslog': '/opt/dirsrv/var/log/dirsrv/slapd-standalone/access', 'nsslapd-auditlog': '/opt/dirsrv/var/log/dirsrv/slapd-standalone/audit', 'nsslapd-errorlog': '/opt/dirsrv/var/log/dirsrv/slapd-standalone/errors', 'nsslapd-tmpdir': '/tmp', 'nsslapd-rundir': '/opt/dirsrv/var/run/dirsrv', 'nsslapd-bakdir': '/opt/dirsrv/var/lib/dirsrv/slapd-standalone/bak', 'nsslapd-certdir': '/opt/dirsrv/etc/dirsrv/slapd-standalone', 'nsslapd-instancedir': '/opt/dirsrv/lib/dirsrv/slapd-standalone', 'nsslapd-ldifdir': '/opt/dirsrv/var/lib/dirsrv/slapd-standalone/ldif', 'nsslapd-lockdir': '/opt/dirsrv/var/lock/dirsrv/slapd-standalone', 'nsslapd-schemadir': '/opt/dirsrv/etc/dirsrv/slapd-standalone/schema', 'nsslapd-workingdir': '/opt/dirsrv/var/log/dirsrv/slapd-standalone', 'nsslapd-localhost': 'localhost.localdomain', # These can't be reset, but might be in dse.ldif. Probably in libglobs. 'nsslapd-certmap-basedn': 'cn=certmap,cn=config', 'nsslapd-port': '38931', # Can't delete 'nsslapd-secureport': '636', # Can't delete 'nsslapd-conntablesize': '1048576', 'nsslapd-rootpw': '{SSHA512}...', # These are hardcoded server magic. 
'nsslapd-hash-filters': 'off', # Can't delete 'nsslapd-requiresrestart': 'cn=config:nsslapd-port', # Can't change 'nsslapd-plugin': 'cn=case ignore string syntax,cn=plugins,cn=config', # Can't change 'nsslapd-privatenamespaces': 'cn=schema', # Can't change 'nsslapd-allowed-to-delete-attrs': 'None', # Can't delete 'nsslapd-accesslog-list': 'List!', # Can't delete 'nsslapd-auditfaillog-list': 'List!', 'nsslapd-auditlog-list': 'List!', 'nsslapd-errorlog-list': 'List!', 'nsslapd-config': 'cn=config', 'nsslapd-versionstring': '389-Directory/1.3.6.0', 'objectclass': '', 'cn': '', # These are the odd values 'nsslapd-backendconfig': 'cn=config,cn=userRoot,cn=ldbm database,cn=plugins,cn=config', # Doesn't exist? 'nsslapd-betype': 'ldbm database', # Doesn't exist? 'nsslapd-connection-buffer': 1, # Has an ldap problem 'nsslapd-malloc-mmap-threshold': '-10', # Defunct anyway 'nsslapd-malloc-mxfast': '-10', 'nsslapd-malloc-trim-threshold': '-10', 'nsslapd-referralmode': '', 'nsslapd-saslpath': '', 'passwordadmindn': '', } > config_entry = topology_st.standalone.config.raw_entry() /export/tests/tickets/ticket48961_test.py:101: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.config.Config object at 0x7faa473783d0>, name = 'raw_entry' def __getattr__(self, name): """This enables a bit of magic to allow us to wrap any function ending with _json to it's form without json, then transformed. It means your function *must* return it's values as a dict of: { attr : [val, val, ...], attr : [], ... } to be supported. """ if (name.endswith('_json')): int_name = name.replace('_json', '') pfunc = partial(self._jsonify, getattr(self, int_name)) return pfunc else: > raise AttributeError("'%s' object has no attribute '%s'" % (self.__class__.__name__, name)) E AttributeError: 'Config' object has no attribute 'raw_entry' /usr/local/lib/python3.8/site-packages/lib389/_mapped_object.py:199: AttributeError | |||
Failed | tickets/ticket49039_test.py::test_ticket49039 | 12.87 | |
topo = <lib389.topologies.TopologyMain object at 0x7faa47318610> def test_ticket49039(topo): """Test "password must change" verses "password min age". Min age should not block password update if the password was reset. """ # Setup SSL (for ldappasswd test) topo.standalone.enable_tls() # Configure password policy try: policy = PwPolicyManager(topo.standalone) policy.set_global_policy(properties={'nsslapd-pwpolicy-local': 'on', 'passwordMustChange': 'on', 'passwordExp': 'on', 'passwordMaxAge': '86400000', 'passwordMinAge': '8640000', 'passwordChange': 'on'}) except ldap.LDAPError as e: log.fatal('Failed to set password policy: ' + str(e)) # Add user, bind, and set password try: topo.standalone.add_s(Entry((USER_DN, { 'objectclass': 'top extensibleObject'.split(), 'uid': 'user1', 'userpassword': PASSWORD }))) except ldap.LDAPError as e: log.fatal('Failed to add user: error ' + e.args[0]['desc']) assert False # Reset password as RootDN try: topo.standalone.modify_s(USER_DN, [(ldap.MOD_REPLACE, 'userpassword', ensure_bytes(PASSWORD))]) except ldap.LDAPError as e: log.fatal('Failed to bind: error ' + e.args[0]['desc']) assert False time.sleep(1) # Reset password as user try: topo.standalone.simple_bind_s(USER_DN, PASSWORD) except ldap.LDAPError as e: log.fatal('Failed to bind: error ' + e.args[0]['desc']) assert False try: > topo.standalone.modify_s(USER_DN, [(ldap.MOD_REPLACE, 'userpassword', ensure_bytes(PASSWORD))]) /export/tests/tickets/ticket49039_test.py:75: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = ('uid=user,dc=example,dc=com', [(2, 'userpassword', b'password')]) kwargs = {} c_stack = [FrameInfo(frame=<frame at 0x7faa541b1640, file '/usr/local/lib/python3.8/site-packages/lib389/__init__.py', line 180,...93, function='_hookexec', code_context=[' return self._inner_hookexec(hook, methods, kwargs)\n'], index=0), ...] 
frame = FrameInfo(frame=<frame at 0x55ebeb2d1a90, file '/export/tests/tickets/ticket49039_test.py', line 78, code test_ticket4...[" topo.standalone.modify_s(USER_DN, [(ldap.MOD_REPLACE, 'userpassword', ensure_bytes(PASSWORD))])\n"], index=0) def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = 
<lib389.DirSrv object at 0x7faa473185b0> dn = 'uid=user,dc=example,dc=com', modlist = [(2, 'userpassword', b'password')] def modify_s(self,dn,modlist): > return self.modify_ext_s(dn,modlist,None,None) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:640: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = ('uid=user,dc=example,dc=com', [(2, 'userpassword', b'password')], None, None) kwargs = {} def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. 
elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa473185b0> dn = 'uid=user,dc=example,dc=com', modlist = [(2, 'userpassword', b'password')] serverctrls = None, clientctrls = None def modify_ext_s(self,dn,modlist,serverctrls=None,clientctrls=None): msgid = self.modify_ext(dn,modlist,serverctrls,clientctrls) > resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:613: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = (7,), kwargs = {'all': 1, 'timeout': -1} def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am 
sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa473185b0>, msgid = 7, all = 1 timeout = -1, resp_ctrl_classes = None def result3(self,msgid=ldap.RES_ANY,all=1,timeout=None,resp_ctrl_classes=None): > resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4( msgid,all,timeout, add_ctrls=0,add_intermediates=0,add_extop=0, resp_ctrl_classes=resp_ctrl_classes ) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:764: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = (7, 1, 
-1) kwargs = {'add_ctrls': 0, 'add_extop': 0, 'add_intermediates': 0, 'resp_ctrl_classes': None} def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa473185b0>, msgid = 7, all = 1 timeout = -1, add_ctrls = 0, add_intermediates = 0, add_extop = 0 resp_ctrl_classes = None 
def result4(self,msgid=ldap.RES_ANY,all=1,timeout=None,add_ctrls=0,add_intermediates=0,add_extop=0,resp_ctrl_classes=None): if timeout is None: timeout = self.timeout > ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:774: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = (<built-in method result4 of LDAP object at 0x7faa47318210>, 7, 1, -1, 0, 0, ...) kwargs = {} def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. 
elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa473185b0> func = <built-in method result4 of LDAP object at 0x7faa47318210> args = (7, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: result = func(*args,**kwargs) if __debug__ and self._trace_level>=2: if func.__name__!="unbind_ext": 
diagnostic_message_success = self._l.get_option(ldap.OPT_DIAGNOSTIC_MESSAGE) finally: self._ldap_object_lock.release() except LDAPError as e: exc_type,exc_value,exc_traceback = sys.exc_info() try: if 'info' not in e.args[0] and 'errno' in e.args[0]: e.args[0]['info'] = strerror(e.args[0]['errno']) except IndexError: pass if __debug__ and self._trace_level>=2: self._trace_file.write('=> LDAPError - %s: %s\n' % (e.__class__.__name__,str(e))) try: > reraise(exc_type, exc_value, exc_traceback) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:340: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ exc_type = <class 'ldap.INSUFFICIENT_ACCESS'> exc_value = INSUFFICIENT_ACCESS({'msgtype': 103, 'msgid': 7, 'result': 50, 'desc': 'Insufficient access', 'ctrls': [], 'info': "Insufficient 'write' privilege to the 'userPassword' attribute of entry 'uid=user,dc=example,dc=com'.\n"}) exc_traceback = <traceback object at 0x7faa466323c0> def reraise(exc_type, exc_value, exc_traceback): """Re-raise an exception given information from sys.exc_info() Note that unlike six.reraise, this does not support replacing the traceback. All arguments must come from a single sys.exc_info() call. """ # In Python 3, all exception info is contained in one object. 
> raise exc_value /usr/lib64/python3.8/site-packages/ldap/compat.py:46: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa473185b0> func = <built-in method result4 of LDAP object at 0x7faa47318210> args = (7, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E ldap.INSUFFICIENT_ACCESS: {'msgtype': 103, 'msgid': 7, 'result': 50, 'desc': 'Insufficient access', 'ctrls': [], 'info': "Insufficient 'write' privilege to the 'userPassword' attribute of entry 'uid=user,dc=example,dc=com'.\n"} /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:324: INSUFFICIENT_ACCESS During handling of the above exception, another exception occurred: topo = <lib389.topologies.TopologyMain object at 0x7faa47318610> def test_ticket49039(topo): """Test "password must change" verses "password min age". Min age should not block password update if the password was reset. 
""" # Setup SSL (for ldappasswd test) topo.standalone.enable_tls() # Configure password policy try: policy = PwPolicyManager(topo.standalone) policy.set_global_policy(properties={'nsslapd-pwpolicy-local': 'on', 'passwordMustChange': 'on', 'passwordExp': 'on', 'passwordMaxAge': '86400000', 'passwordMinAge': '8640000', 'passwordChange': 'on'}) except ldap.LDAPError as e: log.fatal('Failed to set password policy: ' + str(e)) # Add user, bind, and set password try: topo.standalone.add_s(Entry((USER_DN, { 'objectclass': 'top extensibleObject'.split(), 'uid': 'user1', 'userpassword': PASSWORD }))) except ldap.LDAPError as e: log.fatal('Failed to add user: error ' + e.args[0]['desc']) assert False # Reset password as RootDN try: topo.standalone.modify_s(USER_DN, [(ldap.MOD_REPLACE, 'userpassword', ensure_bytes(PASSWORD))]) except ldap.LDAPError as e: log.fatal('Failed to bind: error ' + e.args[0]['desc']) assert False time.sleep(1) # Reset password as user try: topo.standalone.simple_bind_s(USER_DN, PASSWORD) except ldap.LDAPError as e: log.fatal('Failed to bind: error ' + e.args[0]['desc']) assert False try: topo.standalone.modify_s(USER_DN, [(ldap.MOD_REPLACE, 'userpassword', ensure_bytes(PASSWORD))]) except ldap.LDAPError as e: log.fatal('Failed to change password: error ' + e.args[0]['desc']) > assert False E assert False /export/tests/tickets/ticket49039_test.py:78: AssertionError -------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. 
-------------------------------Captured log call-------------------------------- [31mCRITICAL[0m tests.tickets.ticket49039_test:ticket49039_test.py:77 Failed to change password: error Insufficient access | |||
Failed | tickets/ticket49072_test.py::test_ticket49072_basedn | 4.96 | |
topo = <lib389.topologies.TopologyMain object at 0x7faa477ef250> def test_ticket49072_basedn(topo): """memberOf fixup task does not validate args :id: dce9b898-119d-42b8-a236-1130e59bfe18 :feature: memberOf :setup: Standalone instance, with memberOf plugin :steps: 1. Run fixup-memberOf.pl with invalid DN entry 2. Check if error log reports "Failed to get be backend" :expectedresults: Fixup-memberOf.pl task should complete, but errors logged. """ log.info("Ticket 49072 memberof fixup task with invalid basedn...") topo.standalone.plugins.enable(name=PLUGIN_MEMBER_OF) topo.standalone.restart(timeout=10) if ds_is_older('1.3'): inst_dir = topo.standalone.get_inst_dir() memof_task = os.path.join(inst_dir, FIXUP_MEMOF) try: output = subprocess.check_output([memof_task, '-D', DN_DM, '-w', PASSWORD, '-b', TEST_BASEDN, '-f', FILTER]) except subprocess.CalledProcessError as err: output = err.output else: sbin_dir = topo.standalone.get_sbin_dir() memof_task = os.path.join(sbin_dir, FIXUP_MEMOF) try: > output = subprocess.check_output( [memof_task, '-D', DN_DM, '-w', PASSWORD, '-b', TEST_BASEDN, '-Z', SERVERID_STANDALONE, '-f', FILTER]) /export/tests/tickets/ticket49072_test.py:55: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/lib64/python3.8/subprocess.py:411: in check_output return run(*popenargs, stdout=PIPE, timeout=timeout, check=True, /usr/lib64/python3.8/subprocess.py:489: in run with Popen(*popenargs, **kwargs) as process: /usr/lib64/python3.8/subprocess.py:854: in __init__ self._execute_child(args, executable, preexec_fn, close_fds, _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <subprocess.Popen object at 0x7faa476a80a0> args = ['/usr/sbin/fixup-memberof.pl', '-D', 'cn=Directory Manager', '-w', 'password', '-b', ...] 
executable = b'/usr/sbin/fixup-memberof.pl', preexec_fn = None, close_fds = True pass_fds = (), cwd = None, env = None, startupinfo = None, creationflags = 0 shell = False, p2cread = -1, p2cwrite = -1, c2pread = 48, c2pwrite = 49 errread = -1, errwrite = -1, restore_signals = True, start_new_session = False def _execute_child(self, args, executable, preexec_fn, close_fds, pass_fds, cwd, env, startupinfo, creationflags, shell, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite, restore_signals, start_new_session): """Execute program (POSIX version)""" if isinstance(args, (str, bytes)): args = [args] elif isinstance(args, os.PathLike): if shell: raise TypeError('path-like args is not allowed when ' 'shell is true') args = [args] else: args = list(args) if shell: # On Android the default shell is at '/system/bin/sh'. unix_shell = ('/system/bin/sh' if hasattr(sys, 'getandroidapilevel') else '/bin/sh') args = [unix_shell, "-c"] + args if executable: args[0] = executable if executable is None: executable = args[0] sys.audit("subprocess.Popen", executable, args, cwd, env) if (_USE_POSIX_SPAWN and os.path.dirname(executable) and preexec_fn is None and not close_fds and not pass_fds and cwd is None and (p2cread == -1 or p2cread > 2) and (c2pwrite == -1 or c2pwrite > 2) and (errwrite == -1 or errwrite > 2) and not start_new_session): self._posix_spawn(args, executable, env, restore_signals, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite) return orig_executable = executable # For transferring possible exec failure from child to parent. # Data format: "exception name:hex errno:description" # Pickle is not used; it is complex and involves memory allocation. errpipe_read, errpipe_write = os.pipe() # errpipe_write must not be in the standard io 0, 1, or 2 fd range. 
low_fds_to_close = [] while errpipe_write < 3: low_fds_to_close.append(errpipe_write) errpipe_write = os.dup(errpipe_write) for low_fd in low_fds_to_close: os.close(low_fd) try: try: # We must avoid complex work that could involve # malloc or free in the child process to avoid # potential deadlocks, thus we do all this here. # and pass it to fork_exec() if env is not None: env_list = [] for k, v in env.items(): k = os.fsencode(k) if b'=' in k: raise ValueError("illegal environment variable name") env_list.append(k + b'=' + os.fsencode(v)) else: env_list = None # Use execv instead of execve. executable = os.fsencode(executable) if os.path.dirname(executable): executable_list = (executable,) else: # This matches the behavior of os._execvpe(). executable_list = tuple( os.path.join(os.fsencode(dir), executable) for dir in os.get_exec_path(env)) fds_to_keep = set(pass_fds) fds_to_keep.add(errpipe_write) self.pid = _posixsubprocess.fork_exec( args, executable_list, close_fds, tuple(sorted(map(int, fds_to_keep))), cwd, env_list, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite, errpipe_read, errpipe_write, restore_signals, start_new_session, preexec_fn) self._child_created = True finally: # be sure the FD is closed no matter what os.close(errpipe_write) self._close_pipe_fds(p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite) # Wait for exec to fail or succeed; possibly raising an # exception (limited in size) errpipe_data = bytearray() while True: part = os.read(errpipe_read, 50000) errpipe_data += part if not part or len(errpipe_data) > 50000: break finally: # be sure the FD is closed no matter what os.close(errpipe_read) if errpipe_data: try: pid, sts = os.waitpid(self.pid, 0) if pid == self.pid: self._handle_exitstatus(sts) else: self.returncode = sys.maxsize except ChildProcessError: pass try: exception_name, hex_errno, err_msg = ( errpipe_data.split(b':', 2)) # The encoding here should match the encoding # written in by the subprocess implementations # 
like _posixsubprocess err_msg = err_msg.decode() except ValueError: exception_name = b'SubprocessError' hex_errno = b'0' err_msg = 'Bad exception data from child: {!r}'.format( bytes(errpipe_data)) child_exception_type = getattr( builtins, exception_name.decode('ascii'), SubprocessError) if issubclass(child_exception_type, OSError) and hex_errno: errno_num = int(hex_errno, 16) child_exec_never_called = (err_msg == "noexec") if child_exec_never_called: err_msg = "" # The error must be from chdir(cwd). err_filename = cwd else: err_filename = orig_executable if errno_num != 0: err_msg = os.strerror(errno_num) > raise child_exception_type(errno_num, err_msg, err_filename) E FileNotFoundError: [Errno 2] No such file or directory: '/usr/sbin/fixup-memberof.pl' /usr/lib64/python3.8/subprocess.py:1702: FileNotFoundError -------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket49072_test:ticket49072_test.py:40 Ticket 49072 memberof fixup task with invalid basedn... | |||
Failed | tickets/ticket49072_test.py::test_ticket49072_filter | 10.04 | |
topo = <lib389.topologies.TopologyMain object at 0x7faa477ef250> def test_ticket49072_filter(topo): """memberOf fixup task does not validate args :id: dde9e893-119d-42c8-a236-1190e56bfe98 :feature: memberOf :setup: Standalone instance, with memberOf plugin :steps: 1. Run fixup-memberOf.pl with invalid filter 2. Check if error log reports "Bad search filter" :expectedresults: Fixup-memberOf.pl task should complete, but errors logged. """ log.info("Ticket 49072 memberof fixup task with invalid filter...") log.info('Wait for 10 secs and check if task is completed') time.sleep(10) task_memof = 'cn=memberOf task,cn=tasks,cn=config' if topo.standalone.search_s(task_memof, ldap.SCOPE_SUBTREE, 'cn=memberOf_fixup*', ['dn:']): log.info('memberof task is still running, wait for +10 secs') time.sleep(10) if ds_is_older('1.3'): inst_dir = topo.standalone.get_inst_dir() memof_task = os.path.join(inst_dir, FIXUP_MEMOF) try: output = subprocess.check_output([memof_task, '-D', DN_DM, '-w', PASSWORD, '-b', SUFFIX, '-f', TEST_FILTER]) except subprocess.CalledProcessError as err: output = err.output else: sbin_dir = topo.standalone.get_sbin_dir() memof_task = os.path.join(sbin_dir, FIXUP_MEMOF) try: > output = subprocess.check_output( [memof_task, '-D', DN_DM, '-w', PASSWORD, '-b', SUFFIX, '-Z', SERVERID_STANDALONE, '-f', TEST_FILTER]) /export/tests/tickets/ticket49072_test.py:96: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/lib64/python3.8/subprocess.py:411: in check_output return run(*popenargs, stdout=PIPE, timeout=timeout, check=True, /usr/lib64/python3.8/subprocess.py:489: in run with Popen(*popenargs, **kwargs) as process: /usr/lib64/python3.8/subprocess.py:854: in __init__ self._execute_child(args, executable, preexec_fn, close_fds, _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <subprocess.Popen object at 0x7faa476a3340> args = ['/usr/sbin/fixup-memberof.pl', '-D', 'cn=Directory Manager', '-w', 
'password', '-b', ...] executable = b'/usr/sbin/fixup-memberof.pl', preexec_fn = None, close_fds = True pass_fds = (), cwd = None, env = None, startupinfo = None, creationflags = 0 shell = False, p2cread = -1, p2cwrite = -1, c2pread = 48, c2pwrite = 49 errread = -1, errwrite = -1, restore_signals = True, start_new_session = False def _execute_child(self, args, executable, preexec_fn, close_fds, pass_fds, cwd, env, startupinfo, creationflags, shell, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite, restore_signals, start_new_session): """Execute program (POSIX version)""" if isinstance(args, (str, bytes)): args = [args] elif isinstance(args, os.PathLike): if shell: raise TypeError('path-like args is not allowed when ' 'shell is true') args = [args] else: args = list(args) if shell: # On Android the default shell is at '/system/bin/sh'. unix_shell = ('/system/bin/sh' if hasattr(sys, 'getandroidapilevel') else '/bin/sh') args = [unix_shell, "-c"] + args if executable: args[0] = executable if executable is None: executable = args[0] sys.audit("subprocess.Popen", executable, args, cwd, env) if (_USE_POSIX_SPAWN and os.path.dirname(executable) and preexec_fn is None and not close_fds and not pass_fds and cwd is None and (p2cread == -1 or p2cread > 2) and (c2pwrite == -1 or c2pwrite > 2) and (errwrite == -1 or errwrite > 2) and not start_new_session): self._posix_spawn(args, executable, env, restore_signals, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite) return orig_executable = executable # For transferring possible exec failure from child to parent. # Data format: "exception name:hex errno:description" # Pickle is not used; it is complex and involves memory allocation. errpipe_read, errpipe_write = os.pipe() # errpipe_write must not be in the standard io 0, 1, or 2 fd range. 
low_fds_to_close = [] while errpipe_write < 3: low_fds_to_close.append(errpipe_write) errpipe_write = os.dup(errpipe_write) for low_fd in low_fds_to_close: os.close(low_fd) try: try: # We must avoid complex work that could involve # malloc or free in the child process to avoid # potential deadlocks, thus we do all this here. # and pass it to fork_exec() if env is not None: env_list = [] for k, v in env.items(): k = os.fsencode(k) if b'=' in k: raise ValueError("illegal environment variable name") env_list.append(k + b'=' + os.fsencode(v)) else: env_list = None # Use execv instead of execve. executable = os.fsencode(executable) if os.path.dirname(executable): executable_list = (executable,) else: # This matches the behavior of os._execvpe(). executable_list = tuple( os.path.join(os.fsencode(dir), executable) for dir in os.get_exec_path(env)) fds_to_keep = set(pass_fds) fds_to_keep.add(errpipe_write) self.pid = _posixsubprocess.fork_exec( args, executable_list, close_fds, tuple(sorted(map(int, fds_to_keep))), cwd, env_list, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite, errpipe_read, errpipe_write, restore_signals, start_new_session, preexec_fn) self._child_created = True finally: # be sure the FD is closed no matter what os.close(errpipe_write) self._close_pipe_fds(p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite) # Wait for exec to fail or succeed; possibly raising an # exception (limited in size) errpipe_data = bytearray() while True: part = os.read(errpipe_read, 50000) errpipe_data += part if not part or len(errpipe_data) > 50000: break finally: # be sure the FD is closed no matter what os.close(errpipe_read) if errpipe_data: try: pid, sts = os.waitpid(self.pid, 0) if pid == self.pid: self._handle_exitstatus(sts) else: self.returncode = sys.maxsize except ChildProcessError: pass try: exception_name, hex_errno, err_msg = ( errpipe_data.split(b':', 2)) # The encoding here should match the encoding # written in by the subprocess implementations # 
like _posixsubprocess err_msg = err_msg.decode() except ValueError: exception_name = b'SubprocessError' hex_errno = b'0' err_msg = 'Bad exception data from child: {!r}'.format( bytes(errpipe_data)) child_exception_type = getattr( builtins, exception_name.decode('ascii'), SubprocessError) if issubclass(child_exception_type, OSError) and hex_errno: errno_num = int(hex_errno, 16) child_exec_never_called = (err_msg == "noexec") if child_exec_never_called: err_msg = "" # The error must be from chdir(cwd). err_filename = cwd else: err_filename = orig_executable if errno_num != 0: err_msg = os.strerror(errno_num) > raise child_exception_type(errno_num, err_msg, err_filename) E FileNotFoundError: [Errno 2] No such file or directory: '/usr/sbin/fixup-memberof.pl' /usr/lib64/python3.8/subprocess.py:1702: FileNotFoundError -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket49072_test:ticket49072_test.py:77 Ticket 49072 memberof fixup task with invalid filter... [32mINFO [0m tests.tickets.ticket49072_test:ticket49072_test.py:78 Wait for 10 secs and check if task is completed | |||
Failed | tickets/ticket49073_test.py::test_ticket49073 | 8.56 | |
topology_m2 = <lib389.topologies.TopologyMain object at 0x7faa47122d60> def test_ticket49073(topology_m2): """Write your replication test here. To access each DirSrv instance use: topology_m2.ms["master1"], topology_m2.ms["master2"], ..., topology_m2.hub1, ..., topology_m2.consumer1,... Also, if you need any testcase initialization, please, write additional fixture for that(include finalizer). """ topology_m2.ms["master1"].plugins.enable(name=PLUGIN_MEMBER_OF) topology_m2.ms["master1"].restart(timeout=10) topology_m2.ms["master2"].plugins.enable(name=PLUGIN_MEMBER_OF) topology_m2.ms["master2"].restart(timeout=10) # Configure fractional to prevent total init to send memberof ents = topology_m2.ms["master1"].agreement.list(suffix=SUFFIX) assert len(ents) == 1 log.info('update %s to add nsDS5ReplicatedAttributeListTotal' % ents[0].dn) > topology_m2.ms["master1"].modify_s(ents[0].dn, [(ldap.MOD_REPLACE, 'nsDS5ReplicatedAttributeListTotal', '(objectclass=*) $ EXCLUDE '), (ldap.MOD_REPLACE, 'nsDS5ReplicatedAttributeList', '(objectclass=*) $ EXCLUDE memberOf')]) /export/tests/tickets/ticket49073_test.py:97: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:640: in modify_s return self.modify_ext_s(dn,modlist,None,None) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:612: in modify_ext_s msgid = self.modify_ext(dn,modlist,serverctrls,clientctrls) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:609: in modify_ext return self._ldap_call(self._l.modify_ext,dn,modlist,RequestControlTuples(serverctrls),RequestControlTuples(clientctrls)) 
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa47122d00> func = <built-in method modify_ext of LDAP object at 0x7faa471144e0> args = ('cn=002,cn=replica,cn=dc\\3Dexample\\2Cdc\\3Dcom,cn=mapping tree,cn=config', [(2, 'nsDS5ReplicatedAttributeListTotal', '(objectclass=*) $ EXCLUDE '), (2, 'nsDS5ReplicatedAttributeList', '(objectclass=*) $ EXCLUDE memberOf')], None, None) kwargs = {}, diagnostic_message_success = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E TypeError: ('Tuple_to_LDAPMod(): expected a byte string in the list', '(') /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:324: TypeError -------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... 
[32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect d7815b88-1077-434f-aa99-d1478b076b5c / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 3968a9e9-ae38-4811-8786-3783e5434e52 / got description=d7815b88-1077-434f-aa99-d1478b076b5c) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working 
[32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket49073_test:ticket49073_test.py:96 update cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config to add nsDS5ReplicatedAttributeListTotal | |||
Failed | tickets/ticket49104_test.py::test_ticket49104_setup | 0.00 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7faa540b9af0> def test_ticket49104_setup(topology_st): """ Generate an ldif file having 10K entries and import it. """ # Generate a test ldif (100k entries) ldif_dir = topology_st.standalone.get_ldif_dir() import_ldif = ldif_dir + '/49104.ldif' try: > topology_st.standalone.buildLDIF(100000, import_ldif) /export/tests/tickets/ticket49104_test.py:30: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa540b9670>, num = 100000 ldif_file = '/var/lib/dirsrv/slapd-standalone1/ldif/49104.ldif' suffix = 'dc=example,dc=com' def buildLDIF(self, num, ldif_file, suffix='dc=example,dc=com'): """Generate a simple ldif file using the dbgen.pl script, and set the ownership and permissions to match the user that the server runs as. @param num - number of entries to create @param ldif_file - ldif file name(including the path) @suffix - DN of the parent entry in the ldif file @return - nothing @raise - OSError """ > raise Exception("Perl tools disabled on this system. Try dbgen py module.") E Exception: Perl tools disabled on this system. Try dbgen py module. /usr/local/lib/python3.8/site-packages/lib389/__init__.py:3236: Exception -------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Failed | tickets/ticket49192_test.py::test_ticket49192 | 0.00 | |
topo = <lib389.topologies.TopologyMain object at 0x7faa46fcfbe0> def test_ticket49192(topo): """Trigger deadlock when removing suffix """ # # Create a second suffix/backend # log.info('Creating second backend...') > topo.standalone.backends.create(None, properties={ BACKEND_NAME: "Second_Backend", 'suffix': "o=hang.com", }) /export/tests/tickets/ticket49192_test.py:35: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/local/lib/python3.8/site-packages/lib389/_mapped_object.py:1169: in create return co.create(rdn, properties, self._basedn) _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.backend.Backend object at 0x7faa46fcfb80>, dn = None properties = {'name': 'Second_Backend', 'suffix': 'o=hang.com'} basedn = 'cn=ldbm database,cn=plugins,cn=config' def create(self, dn=None, properties=None, basedn=DN_LDBM): """Add a new backend entry, create mapping tree, and, if requested, sample entries :param dn: DN of the new entry :type dn: str :param properties: Attributes and parameters for the new entry :type properties: dict :param basedn: Base DN of the new entry :type basedn: str :returns: DSLdapObject of the created entry """ sample_entries = False parent_suffix = False # normalize suffix (remove spaces between comps) if dn is not None: dn_comps = ldap.dn.explode_dn(dn.lower()) dn = ",".join(dn_comps) if properties is not None: > suffix_dn = properties['nsslapd-suffix'].lower() E KeyError: 'nsslapd-suffix' /usr/local/lib/python3.8/site-packages/lib389/backend.py:609: KeyError -------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. 
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket49192_test:ticket49192_test.py:34 Creating second backend... | |||
Failed | tickets/ticket49287_test.py::test_ticket49287 | 14.42 | |
self = <lib389.mappingTree.MappingTreeLegacy object at 0x7faa475bcee0> suffix = 'dc=test,dc=com', bename = 'test', parent = None def create(self, suffix=None, bename=None, parent=None): ''' Create a mapping tree entry (under "cn=mapping tree,cn=config"), for the 'suffix' and that is stored in 'bename' backend. 'bename' backend must exist before creating the mapping tree entry. If a 'parent' is provided that means that we are creating a sub-suffix mapping tree. @param suffix - suffix mapped by this mapping tree entry. It will be the common name ('cn') of the entry @param benamebase - backend common name (e.g. 'userRoot') @param parent - if provided is a parent suffix of 'suffix' @return DN of the mapping tree entry @raise ldap.NO_SUCH_OBJECT - if the backend entry or parent mapping tree does not exist ValueError - if missing a parameter, ''' # Check suffix is provided if not suffix: raise ValueError("suffix is mandatory") else: nsuffix = normalizeDN(suffix) # Check backend name is provided if not bename: raise ValueError("backend name is mandatory") # Check that if the parent suffix is provided then # it exists a mapping tree for it if parent: nparent = normalizeDN(parent) filt = suffixfilt(parent) try: entry = self.conn.getEntry(DN_MAPPING_TREE, ldap.SCOPE_SUBTREE, filt) pass except NoSuchEntryError: raise ValueError("parent suffix has no mapping tree") else: nparent = "" # Check if suffix exists, return filt = suffixfilt(suffix) try: entry = self.conn.getEntry(DN_MAPPING_TREE, ldap.SCOPE_SUBTREE, filt) return entry except ldap.NO_SUCH_OBJECT: entry = None # # Now start the real work # # fix me when we can actually used escaped DNs dn = ','.join(('cn="%s"' % nsuffix, DN_MAPPING_TREE)) entry = Entry(dn) entry.update({ 'objectclass': ['top', 'extensibleObject', MT_OBJECTCLASS_VALUE], 'nsslapd-state': 'backend', # the value in the dn has to be DN escaped # internal code will add the quoted value - unquoted value is # useful for searching. 
MT_PROPNAME_TO_ATTRNAME[MT_SUFFIX]: nsuffix, MT_PROPNAME_TO_ATTRNAME[MT_BACKEND]: bename }) # possibly add the parent if parent: entry.setValues(MT_PROPNAME_TO_ATTRNAME[MT_PARENT_SUFFIX], nparent) try: self.log.debug("Creating entry: %s", entry.dn) self.log.info("Entry %r", entry) > self.conn.add_s(entry) /usr/local/lib/python3.8/site-packages/lib389/mappingTree.py:155: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = (dn: cn="dc=test,dc=com",cn=mapping tree,cn=config cn: dc=test,dc=com nsslapd-backend: test nsslapd-state: backend objectclass: top objectclass: extensibleObject objectclass: nsMappingTree ,) kwargs = {} c_stack = [FrameInfo(frame=<frame at 0x7faa4707cc40, file '/usr/local/lib/python3.8/site-packages/lib389/__init__.py', line 176,...neno=187, function='_multicall', code_context=[' res = hook_impl.function(*args)\n'], index=0), ...] frame = FrameInfo(frame=<frame at 0x55ebeb79de40, file '/usr/local/lib/python3.8/site-packages/lib389/mappingTree.py', line 15.../lib389/mappingTree.py', lineno=155, function='create', code_context=[' self.conn.add_s(entry)\n'], index=0) ent = dn: cn="dc=test,dc=com",cn=mapping tree,cn=config cn: dc=test,dc=com nsslapd-backend: test nsslapd-state: backend objectclass: top objectclass: extensibleObject objectclass: nsMappingTree def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. 
elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): > return f(ent.dn, ent.toTupleList(), *args[2:]) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:176: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa4703b9d0> dn = 'cn="dc=test,dc=com",cn=mapping tree,cn=config' modlist = [('objectclass', [b'top', b'extensibleObject', b'nsMappingTree']), ('nsslapd-state', [b'backend']), ('cn', [b'dc=test,dc=com']), ('nsslapd-backend', [b'test'])] def add_s(self,dn,modlist): > return self.add_ext_s(dn,modlist,None,None) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:439: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = ('cn="dc=test,dc=com",cn=mapping tree,cn=config', [('objectclass', [b'top', b'extensibleObject', b'nsMappingTree']), ('nsslapd-state', [b'backend']), ('cn', [b'dc=test,dc=com']), ('nsslapd-backend', [b'test'])], None, None) kwargs = {}, ent = 'cn="dc=test,dc=com",cn=mapping tree,cn=config' def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 
'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: > return f(*args, **kwargs) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:178: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa4703b9d0> dn = 'cn="dc=test,dc=com",cn=mapping tree,cn=config' modlist = [('objectclass', [b'top', b'extensibleObject', b'nsMappingTree']), ('nsslapd-state', [b'backend']), ('cn', [b'dc=test,dc=com']), ('nsslapd-backend', [b'test'])] serverctrls = None, clientctrls = None def add_ext_s(self,dn,modlist,serverctrls=None,clientctrls=None): msgid = 
self.add_ext(dn,modlist,serverctrls,clientctrls) > resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:425: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = (4,), kwargs = {'all': 1, 'timeout': -1} def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: _ _ 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa4703b9d0>, msgid = 4, all = 1 timeout = -1, resp_ctrl_classes = None def result3(self,msgid=ldap.RES_ANY,all=1,timeout=None,resp_ctrl_classes=None): > resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4( msgid,all,timeout, add_ctrls=0,add_intermediates=0,add_extop=0, resp_ctrl_classes=resp_ctrl_classes ) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:764: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = (4, 1, -1) kwargs = {'add_ctrls': 0, 'add_extop': 0, 'add_intermediates': 0, 'resp_ctrl_classes': None} def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. 
elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa4703b9d0>, msgid = 4, all = 1 timeout = -1, add_ctrls = 0, add_intermediates = 0, add_extop = 0 resp_ctrl_classes = None def result4(self,msgid=ldap.RES_ANY,all=1,timeout=None,add_ctrls=0,add_intermediates=0,add_extop=0,resp_ctrl_classes=None): if timeout is None: timeout = self.timeout > ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:774: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = (<built-in method result4 of LDAP object at 0x7faa47594870>, 4, 1, -1, 0, 0, ...) 
kwargs = {} def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa4703b9d0> func = <built-in method result4 of LDAP object at 0x7faa47594870> args = (4, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None 
def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: result = func(*args,**kwargs) if __debug__ and self._trace_level>=2: if func.__name__!="unbind_ext": diagnostic_message_success = self._l.get_option(ldap.OPT_DIAGNOSTIC_MESSAGE) finally: self._ldap_object_lock.release() except LDAPError as e: exc_type,exc_value,exc_traceback = sys.exc_info() try: if 'info' not in e.args[0] and 'errno' in e.args[0]: e.args[0]['info'] = strerror(e.args[0]['errno']) except IndexError: pass if __debug__ and self._trace_level>=2: self._trace_file.write('=> LDAPError - %s: %s\n' % (e.__class__.__name__,str(e))) try: > reraise(exc_type, exc_value, exc_traceback) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:340: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ exc_type = <class 'ldap.UNWILLING_TO_PERFORM'> exc_value = UNWILLING_TO_PERFORM({'msgtype': 105, 'msgid': 4, 'result': 53, 'desc': 'Server is unwilling to perform', 'ctrls': []}) exc_traceback = <traceback object at 0x7faa472b8940> def reraise(exc_type, exc_value, exc_traceback): """Re-raise an exception given information from sys.exc_info() Note that unlike six.reraise, this does not support replacing the traceback. All arguments must come from a single sys.exc_info() call. """ # In Python 3, all exception info is contained in one object. 
> raise exc_value /usr/lib64/python3.8/site-packages/ldap/compat.py:46: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa4703b9d0> func = <built-in method result4 of LDAP object at 0x7faa47594870> args = (4, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E ldap.UNWILLING_TO_PERFORM: {'msgtype': 105, 'msgid': 4, 'result': 53, 'desc': 'Server is unwilling to perform', 'ctrls': []} /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:324: UNWILLING_TO_PERFORM During handling of the above exception, another exception occurred: topology_m2 = <lib389.topologies.TopologyMain object at 0x7faa4703b0d0> def test_ticket49287(topology_m2): """ test case for memberof and conflict entries """ # return M1 = topology_m2.ms["master1"] M2 = topology_m2.ms["master2"] config_memberof(M1) config_memberof(M2) _enable_spec_logging(M1) _enable_spec_logging(M2) _disable_nunc_stans(M1) _disable_nunc_stans(M2) M1.restart(timeout=10) M2.restart(timeout=10) testbase = 'dc=test,dc=com' bename = 'test' > create_backend(M1, M2, testbase, bename) /export/tests/tickets/ticket49287_test.py:282: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /export/tests/tickets/ticket49287_test.py:204: in create_backend s1.mappingtree.create(beSuffix, beName) _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
_ _ _ _ _ _ _ _ _ self = <lib389.mappingTree.MappingTreeLegacy object at 0x7faa475bcee0> suffix = 'dc=test,dc=com', bename = 'test', parent = None def create(self, suffix=None, bename=None, parent=None): ''' Create a mapping tree entry (under "cn=mapping tree,cn=config"), for the 'suffix' and that is stored in 'bename' backend. 'bename' backend must exist before creating the mapping tree entry. If a 'parent' is provided that means that we are creating a sub-suffix mapping tree. @param suffix - suffix mapped by this mapping tree entry. It will be the common name ('cn') of the entry @param benamebase - backend common name (e.g. 'userRoot') @param parent - if provided is a parent suffix of 'suffix' @return DN of the mapping tree entry @raise ldap.NO_SUCH_OBJECT - if the backend entry or parent mapping tree does not exist ValueError - if missing a parameter, ''' # Check suffix is provided if not suffix: raise ValueError("suffix is mandatory") else: nsuffix = normalizeDN(suffix) # Check backend name is provided if not bename: raise ValueError("backend name is mandatory") # Check that if the parent suffix is provided then # it exists a mapping tree for it if parent: nparent = normalizeDN(parent) filt = suffixfilt(parent) try: entry = self.conn.getEntry(DN_MAPPING_TREE, ldap.SCOPE_SUBTREE, filt) pass except NoSuchEntryError: raise ValueError("parent suffix has no mapping tree") else: nparent = "" # Check if suffix exists, return filt = suffixfilt(suffix) try: entry = self.conn.getEntry(DN_MAPPING_TREE, ldap.SCOPE_SUBTREE, filt) return entry except ldap.NO_SUCH_OBJECT: entry = None # # Now start the real work # # fix me when we can actually used escaped DNs dn = ','.join(('cn="%s"' % nsuffix, DN_MAPPING_TREE)) entry = Entry(dn) entry.update({ 'objectclass': ['top', 'extensibleObject', MT_OBJECTCLASS_VALUE], 'nsslapd-state': 'backend', # the value in the dn has to be DN escaped # internal code will add the quoted value - unquoted value is # useful for searching. 
MT_PROPNAME_TO_ATTRNAME[MT_SUFFIX]: nsuffix, MT_PROPNAME_TO_ATTRNAME[MT_BACKEND]: bename }) # possibly add the parent if parent: entry.setValues(MT_PROPNAME_TO_ATTRNAME[MT_PARENT_SUFFIX], nparent) try: self.log.debug("Creating entry: %s", entry.dn) self.log.info("Entry %r", entry) self.conn.add_s(entry) except ldap.LDAPError as e: > raise ldap.LDAPError("Error adding suffix entry " + dn, e) E ldap.LDAPError: ('Error adding suffix entry cn="dc=test,dc=com",cn=mapping tree,cn=config', UNWILLING_TO_PERFORM({'msgtype': 105, 'msgid': 4, 'result': 53, 'desc': 'Server is unwilling to perform', 'ctrls': []})) /usr/local/lib/python3.8/site-packages/lib389/mappingTree.py:157: LDAPError -------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect b6dcd788-47c5-4511-b3f1-9ef139edc4b5 / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect c725b009-2728-417e-84fc-2019c026a29f / got description=b6dcd788-47c5-4511-b3f1-9ef139edc4b5) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... 
[32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket49287_test:ticket49287_test.py:77 update cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config to add nsDS5ReplicatedAttributeListTotal [32mINFO [0m tests.tickets.ticket49287_test:ticket49287_test.py:77 update cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config to add nsDS5ReplicatedAttributeListTotal [32mINFO [0m lib389:mappingTree.py:154 Entry dn: cn="dc=test,dc=com",cn=mapping tree,cn=config cn: dc=test,dc=com nsslapd-backend: test nsslapd-state: backend objectclass: top objectclass: extensibleObject objectclass: nsMappingTree | |||
Failed | tickets/ticket49303_test.py::test_ticket49303 | 18.35 | |
topo = <lib389.topologies.TopologyMain object at 0x7faa4727bdf0> def test_ticket49303(topo): """ Test the nsTLSAllowClientRenegotiation setting. """ sslport = SECUREPORT_STANDALONE1 log.info("Ticket 49303 - Allow disabling of SSL renegotiation") # No value set, defaults to reneg allowed enable_ssl(topo.standalone, sslport) > assert try_reneg(HOST_STANDALONE1, sslport) is True E AssertionError: assert False is True E + where False = try_reneg('LOCALHOST', 63601) /export/tests/tickets/ticket49303_test.py:88: AssertionError -------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket49303_test:ticket49303_test.py:84 Ticket 49303 - Allow disabling of SSL renegotiation | |||
Failed | tickets/ticket49412_test.py::test_ticket49412 | 0.00 | |
topo = <lib389.topologies.TopologyMain object at 0x7faa4712a880> def test_ticket49412(topo): """Specify a test case purpose or name here :id: 4c7681ff-0511-4256-9589-bdcad84c13e6 :setup: Fill in set up configuration here :steps: 1. Fill in test case steps here 2. And indent them like this (RST format requirement) :expectedresults: 1. Fill in the result that is expected 2. For each test step """ M1 = topo.ms["master1"] # wrong call with invalid value (should be str(60) # that create replace with NULL value # it should fail with UNWILLING_TO_PERFORM try: > M1.modify_s(CHANGELOG, [(ldap.MOD_REPLACE, MAXAGE_ATTR, 60), (ldap.MOD_REPLACE, TRIMINTERVAL, 10)]) /export/tests/tickets/ticket49412_test.py:44: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:640: in modify_s return self.modify_ext_s(dn,modlist,None,None) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:613: in modify_ext_s resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:764: in result3 resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4( /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:774: in result4 ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:340: in _ldap_call reraise(exc_type, exc_value, 
exc_traceback) /usr/lib64/python3.8/site-packages/ldap/compat.py:46: in reraise raise exc_value _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa4710e460> func = <built-in method result4 of LDAP object at 0x7faa4712ad50> args = (39, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E ldap.NO_SUCH_OBJECT: {'msgtype': 103, 'msgid': 39, 'result': 32, 'desc': 'No such object', 'ctrls': []} /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:324: NO_SUCH_OBJECT -------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for consumer1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39201, 'ldap-secureport': 63901, 'server-id': 'consumer1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. 
[32mINFO [0m lib389.topologies:topologies.py:169 Joining consumer consumer1 from master1 ... [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect c6f2e66b-0929-4aac-9483-1839bac09423 / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is working [32mINFO [0m lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 [32mINFO [0m lib389.topologies:topologies.py:174 Ensuring consumer consumer1 from master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 already exists | |||
Failed | tickets/ticket49463_test.py::test_ticket_49463 | 317.07 | |
topo = <lib389.topologies.TopologyMain object at 0x7faa46fc0190> def test_ticket_49463(topo): """Specify a test case purpose or name here :id: 2a68e8be-387d-4ac7-9452-1439e8483c13 :setup: Fill in set up configuration here :steps: 1. Enable fractional replication 2. Enable replication logging 3. Check that replication is working fine 4. Generate skipped updates to create keep alive entries 5. Remove M3 from the topology 6. issue cleanAllRuv FORCE that will run on M1 then propagated M2 and M4 7. Check that Number DEL keep alive '3' is <= 1 8. Check M1 is the originator of cleanAllRuv and M2/M4 the propagated ones 9. Check replication M1,M2 and M4 can recover 10. Remove M4 from the topology 11. Issue cleanAllRuv not force while M2 is stopped (that hangs the cleanAllRuv) 12. Check that nsds5ReplicaCleanRUV is correctly encoded on M1 (last value: 1) 13. Check that nsds5ReplicaCleanRUV encoding survives M1 restart 14. Check that nsds5ReplicaCleanRUV encoding is valid on M2 (last value: 0) 15. Check that (for M4 cleanAllRUV) M1 is Originator and M2 propagation :expectedresults: 1. 
No report of failure when the RUV is updated """ # Step 1 - Configure fractional (skip telephonenumber) replication M1 = topo.ms["master1"] M2 = topo.ms["master2"] M3 = topo.ms["master3"] M4 = topo.ms["master4"] repl = ReplicationManager(DEFAULT_SUFFIX) fractional_server_to_replica(M1, M2) fractional_server_to_replica(M1, M3) fractional_server_to_replica(M1, M4) fractional_server_to_replica(M2, M1) fractional_server_to_replica(M2, M3) fractional_server_to_replica(M2, M4) fractional_server_to_replica(M3, M1) fractional_server_to_replica(M3, M2) fractional_server_to_replica(M3, M4) fractional_server_to_replica(M4, M1) fractional_server_to_replica(M4, M2) fractional_server_to_replica(M4, M3) # Step 2 - enable internal op logging and replication debug for i in (M1, M2, M3, M4): i.config.loglevel(vals=[256 + 4], service='access') i.config.loglevel(vals=[LOG_REPLICA, LOG_DEFAULT], service='error') # Step 3 - Check that replication is working fine add_user(M1, 11, desc="add to M1") add_user(M2, 21, desc="add to M2") add_user(M3, 31, desc="add to M3") add_user(M4, 41, desc="add to M4") for i in (M1, M2, M3, M4): for j in (M1, M2, M3, M4): if i == j: continue repl.wait_for_replication(i, j) # Step 4 - Generate skipped updates to create keep alive entries for i in (M1, M2, M3, M4): cn = '%s_%d' % (USER_CN, 11) dn = 'uid=%s,ou=People,%s' % (cn, SUFFIX) users = UserAccount(i, dn) for j in range(110): users.set('telephoneNumber', str(j)) # Step 5 - Remove M3 from the topology M3.stop() M1.agreement.delete(suffix=SUFFIX, consumer_host=M3.host, consumer_port=M3.port) M2.agreement.delete(suffix=SUFFIX, consumer_host=M3.host, consumer_port=M3.port) M4.agreement.delete(suffix=SUFFIX, consumer_host=M3.host, consumer_port=M3.port) # Step 6 - Then issue cleanAllRuv FORCE that will run on M1, M2 and M4 M1.tasks.cleanAllRUV(suffix=SUFFIX, replicaid='3', force=True, args={TASK_WAIT: True}) # Step 7 - Count the number of received DEL of the keep alive 3 for i in (M1, M2, M4): i.restart() 
regex = re.compile(".*DEL dn=.cn=repl keep alive 3.*") for i in (M1, M2, M4): count = count_pattern_accesslog(M1, regex) log.debug("count on %s = %d" % (i, count)) # check that DEL is replicated once (If DEL is kept in the fix) # check that DEL is is not replicated (If DEL is finally no long done in the fix) assert ((count == 1) or (count == 0)) # Step 8 - Check that M1 is Originator of cleanAllRuv and M2, M4 propagation regex = re.compile(".*Original task deletes Keep alive entry .3.*") assert pattern_errorlog(M1, regex) regex = re.compile(".*Propagated task does not delete Keep alive entry .3.*") assert pattern_errorlog(M2, regex) assert pattern_errorlog(M4, regex) # Step 9 - Check replication M1,M2 and M4 can recover add_user(M1, 12, desc="add to M1") add_user(M2, 22, desc="add to M2") for i in (M1, M2, M4): for j in (M1, M2, M4): if i == j: continue repl.wait_for_replication(i, j) # Step 10 - Remove M4 from the topology M4.stop() M1.agreement.delete(suffix=SUFFIX, consumer_host=M4.host, consumer_port=M4.port) M2.agreement.delete(suffix=SUFFIX, consumer_host=M4.host, consumer_port=M4.port) # Step 11 - Issue cleanAllRuv not force while M2 is stopped (that hangs the cleanAllRuv) M2.stop() M1.tasks.cleanAllRUV(suffix=SUFFIX, replicaid='4', force=False, args={TASK_WAIT: False}) # Step 12 # CleanAllRuv is hanging waiting for M2 to restart # Check that nsds5ReplicaCleanRUV is correctly encoded on M1 replicas = Replicas(M1) replica = replicas.list()[0] time.sleep(0.5) replica.present('nsds5ReplicaCleanRUV') log.info("M1: nsds5ReplicaCleanRUV=%s" % replica.get_attr_val_utf8('nsds5replicacleanruv')) regex = re.compile("^4:.*:no:1$") > assert regex.match(replica.get_attr_val_utf8('nsds5replicacleanruv')) E AssertionError: assert None E + where None = <built-in method match of re.Pattern object at 0x7faa461de9d0>('4:no:1:dc=example,dc=com') E + where <built-in method match of re.Pattern object at 0x7faa461de9d0> = re.compile('^4:.*:no:1$').match E + and 
'4:no:1:dc=example,dc=com' = <bound method DSLdapObject.get_attr_val_utf8 of <lib389.replica.Replica object at 0x7faa47540e20>>('nsds5replicacleanruv') E + where <bound method DSLdapObject.get_attr_val_utf8 of <lib389.replica.Replica object at 0x7faa47540e20>> = <lib389.replica.Replica object at 0x7faa47540e20>.get_attr_val_utf8 /export/tests/tickets/ticket49463_test.py:188: AssertionError -------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master3 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39003, 'ldap-secureport': 63703, 'server-id': 'master3', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master4 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39004, 'ldap-secureport': 63704, 'server-id': 'master4', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0ab3a389-82c8-4bdf-94d3-6892d4c8c1c5 / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 2238d569-8e88-44e7-bdfb-810a16f5c079 / got description=0ab3a389-82c8-4bdf-94d3-6892d4c8c1c5) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master3 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 035da392-d3db-427b-89fb-4af37f5cc8b8 / got description=2238d569-8e88-44e7-bdfb-810a16f5c079) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 24745056-20db-432c-ae83-1f8254571132 / got description=035da392-d3db-427b-89fb-4af37f5cc8b8) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master4 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 8c0631bd-dd9c-4ed7-b4f3-b1ba09c96133 / got description=24745056-20db-432c-ae83-1f8254571132) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 33b12597-753c-45b6-94bd-e25c5bb14b11 / got description=8c0631bd-dd9c-4ed7-b4f3-b1ba09c96133) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... 
[32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master3 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master4 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master3 ... [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is was created [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master4 ... [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is was created [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master3 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master3 to master2 ... [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master3 to master4 ... [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is was created [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master4 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master4 to master2 ... [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master4 to master3 ... 
[32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is was created -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 already exists [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 already exists [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 already exists [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 already exists [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m 
lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 already exists [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 already exists [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 72ab9443-7272-4bf4-8ac3-a06052d92972 / got description=33b12597-753c-45b6-94bd-e25c5bb14b11) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 72ab9443-7272-4bf4-8ac3-a06052d92972 / got description=33b12597-753c-45b6-94bd-e25c5bb14b11) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect be97d851-8bfa-4792-813a-cd81d5db63a6 / got description=72ab9443-7272-4bf4-8ac3-a06052d92972) [32mINFO [0m 
lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect be97d851-8bfa-4792-813a-cd81d5db63a6 / got description=72ab9443-7272-4bf4-8ac3-a06052d92972) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect be97d851-8bfa-4792-813a-cd81d5db63a6 / got description=72ab9443-7272-4bf4-8ac3-a06052d92972) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect be97d851-8bfa-4792-813a-cd81d5db63a6 / got description=72ab9443-7272-4bf4-8ac3-a06052d92972) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect dfbe3e01-f92e-4574-8458-29f829b6ad17 / got description=be97d851-8bfa-4792-813a-cd81d5db63a6) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect dfbe3e01-f92e-4574-8458-29f829b6ad17 / got description=be97d851-8bfa-4792-813a-cd81d5db63a6) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 
dfbe3e01-f92e-4574-8458-29f829b6ad17 / got description=be97d851-8bfa-4792-813a-cd81d5db63a6) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect dfbe3e01-f92e-4574-8458-29f829b6ad17 / got description=be97d851-8bfa-4792-813a-cd81d5db63a6) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect dfbe3e01-f92e-4574-8458-29f829b6ad17 / got description=be97d851-8bfa-4792-813a-cd81d5db63a6) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect ef80c1bb-dbd5-4317-8682-c5dbe0e14ea3 / got description=dfbe3e01-f92e-4574-8458-29f829b6ad17) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect ef80c1bb-dbd5-4317-8682-c5dbe0e14ea3 / got description=dfbe3e01-f92e-4574-8458-29f829b6ad17) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 
3f50a124-374b-4099-8750-c8945e8c7710 / got description=ef80c1bb-dbd5-4317-8682-c5dbe0e14ea3) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 3f50a124-374b-4099-8750-c8945e8c7710 / got description=ef80c1bb-dbd5-4317-8682-c5dbe0e14ea3) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 3f50a124-374b-4099-8750-c8945e8c7710 / got description=ef80c1bb-dbd5-4317-8682-c5dbe0e14ea3) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 3befa8e6-0f84-41d9-82e3-50ff6cd36189 / got description=ef80c1bb-dbd5-4317-8682-c5dbe0e14ea3) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 3befa8e6-0f84-41d9-82e3-50ff6cd36189 / got description=3f50a124-374b-4099-8750-c8945e8c7710) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 3befa8e6-0f84-41d9-82e3-50ff6cd36189 / got description=3f50a124-374b-4099-8750-c8945e8c7710) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to 
ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 3befa8e6-0f84-41d9-82e3-50ff6cd36189 / got description=3f50a124-374b-4099-8750-c8945e8c7710) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 30b64620-91d7-476a-a050-dabab66596c3 / got description=3befa8e6-0f84-41d9-82e3-50ff6cd36189) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 30b64620-91d7-476a-a050-dabab66596c3 / got description=3befa8e6-0f84-41d9-82e3-50ff6cd36189) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 7374a7e6-d250-463c-a6e8-1e16c3a8d0b1 / got description=30b64620-91d7-476a-a050-dabab66596c3) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 7374a7e6-d250-463c-a6e8-1e16c3a8d0b1 / got description=30b64620-91d7-476a-a050-dabab66596c3) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to 
ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 7374a7e6-d250-463c-a6e8-1e16c3a8d0b1 / got description=30b64620-91d7-476a-a050-dabab66596c3) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect a28cd0f2-873d-44c4-ae5a-31790816242f / got description=7374a7e6-d250-463c-a6e8-1e16c3a8d0b1) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect d311decf-3cab-4e7e-808a-5c5731e4d015 / got description=7374a7e6-d250-463c-a6e8-1e16c3a8d0b1) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 033f48e4-81c6-4d87-83f1-aee7e82182aa / got description=d311decf-3cab-4e7e-808a-5c5731e4d015) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: 
Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 8079eecb-b192-417f-9f42-49265d172842 / got description=033f48e4-81c6-4d87-83f1-aee7e82182aa) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 8079eecb-b192-417f-9f42-49265d172842 / got description=033f48e4-81c6-4d87-83f1-aee7e82182aa) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is working [32mINFO [0m lib389:agreement.py:1095 Agreement (cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config) was successfully removed [32mINFO [0m lib389:agreement.py:1095 Agreement (cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config) was successfully removed [32mINFO [0m lib389:agreement.py:1095 Agreement (cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config) was successfully removed [32mINFO [0m lib389:tasks.py:1400 cleanAllRUV task (task-11102020_003712) completed successfully [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 08b7a7e2-625c-4019-af21-3411677ba9b3 / got description=8079eecb-b192-417f-9f42-49265d172842) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 08b7a7e2-625c-4019-af21-3411677ba9b3 / got description=8079eecb-b192-417f-9f42-49265d172842) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: 
Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect d37c3e8a-6d57-4514-8acb-1022a0fc6cc1 / got description=08b7a7e2-625c-4019-af21-3411677ba9b3) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect ea921910-2ee5-4325-a9a7-1c86dc4e2afb / got description=d37c3e8a-6d57-4514-8acb-1022a0fc6cc1) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect d55d8212-3491-4254-960f-1cf794992c0e / got description=ea921910-2ee5-4325-a9a7-1c86dc4e2afb) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect d55d8212-3491-4254-960f-1cf794992c0e / got description=ea921910-2ee5-4325-a9a7-1c86dc4e2afb) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to 
ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 876ef8cc-4b5b-43ce-af0d-ba7c023105f2 / got description=d55d8212-3491-4254-960f-1cf794992c0e) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect d6163249-ef9b-45cb-aa9d-a35a566c7510 / got description=876ef8cc-4b5b-43ce-af0d-ba7c023105f2) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389:agreement.py:1095 Agreement (cn=004,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config) was successfully removed [32mINFO [0m lib389:agreement.py:1095 Agreement (cn=004,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config) was successfully removed [32mINFO [0m lib389:tasks.py:1400 cleanAllRUV task (task-11102020_003808) completed successfully [32mINFO [0m lib389.utils:ticket49463_test.py:186 M1: nsds5ReplicaCleanRUV=4:no:1:dc=example,dc=com | |||
Failed | tickets/ticket50232_test.py::test_ticket50232_normal | 0.53 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7faa46de77c0> def test_ticket50232_normal(topology_st): """ The fix for ticket 50232 The test sequence is: - create suffix - add suffix entry and some child entries - "normally" done after populating suffix: enable replication - get RUV and database generation - export -r - import - get RUV and database generation - assert database generation has not changed """ log.info('Testing Ticket 50232 - export creates not imprtable ldif file, normal creation order') topology_st.standalone.backend.create(NORMAL_SUFFIX, {BACKEND_NAME: NORMAL_BACKEND_NAME}) topology_st.standalone.mappingtree.create(NORMAL_SUFFIX, bename=NORMAL_BACKEND_NAME, parent=None) _populate_suffix(topology_st.standalone, NORMAL_BACKEND_NAME) repl = ReplicationManager(DEFAULT_SUFFIX) > repl._ensure_changelog(topology_st.standalone) /export/tests/tickets/ticket50232_test.py:113: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/local/lib/python3.8/site-packages/lib389/replica.py:1928: in _ensure_changelog cl.create(properties={ /usr/local/lib/python3.8/site-packages/lib389/_mapped_object.py:971: in create return self._create(rdn, properties, basedn, ensure=False) /usr/local/lib/python3.8/site-packages/lib389/_mapped_object.py:946: in _create self._instance.add_ext_s(e, serverctrls=self._server_controls, clientctrls=self._client_controls, escapehatch='i am sure') /usr/local/lib/python3.8/site-packages/lib389/__init__.py:176: in inner return f(ent.dn, ent.toTupleList(), *args[2:]) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:425: in add_ext_s resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:764: in result3 resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4( 
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:774: in result4 ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:340: in _ldap_call reraise(exc_type, exc_value, exc_traceback) /usr/lib64/python3.8/site-packages/ldap/compat.py:46: in reraise raise exc_value _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa46de7490> func = <built-in method result4 of LDAP object at 0x7faa46dfb480> args = (13, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E ldap.UNWILLING_TO_PERFORM: {'msgtype': 105, 'msgid': 13, 'result': 53, 'desc': 'Server is unwilling to perform', 'ctrls': [], 'info': 'Changelog configuration is part of the backend configuration'} /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:324: UNWILLING_TO_PERFORM -------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... 
[32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:backend.py:80 List backend with suffix=o=normal [32mINFO [0m lib389:backend.py:290 Creating a local backend [32mINFO [0m lib389:backend.py:76 List backend cn=normal,cn=ldbm database,cn=plugins,cn=config [32mINFO [0m lib389:__init__.py:1713 Found entry dn: cn=normal,cn=ldbm database,cn=plugins,cn=config cn: normal nsslapd-cachememsize: 512000 nsslapd-cachesize: -1 nsslapd-directory: /var/lib/dirsrv/slapd-standalone1/db/normal nsslapd-dncachememsize: 16777216 nsslapd-readonly: off nsslapd-require-index: off nsslapd-require-internalop-index: off nsslapd-suffix: o=normal objectClass: top objectClass: extensibleObject objectClass: nsBackendInstance [32mINFO [0m lib389:mappingTree.py:154 Entry dn: cn="o=normal",cn=mapping tree,cn=config cn: o=normal nsslapd-backend: normal nsslapd-state: backend objectclass: top objectclass: extensibleObject objectclass: nsMappingTree [32mINFO [0m lib389:__init__.py:1713 Found entry dn: cn=o\3Dnormal,cn=mapping tree,cn=config cn: o=normal nsslapd-backend: normal nsslapd-state: backend objectClass: top objectClass: extensibleObject objectClass: nsMappingTree | |||
Failed | tickets/ticket50232_test.py::test_ticket50232_reverse | 0.16 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7faa46de77c0> def test_ticket50232_reverse(topology_st): """ The fix for ticket 50232 The test sequence is: - create suffix - enable replication before suffix enztry is added - add suffix entry and some child entries - get RUV and database generation - export -r - import - get RUV and database generation - assert database generation has not changed """ log.info('Testing Ticket 50232 - export creates not imprtable ldif file, normal creation order') # # Setup Replication # log.info('Setting up replication...') repl = ReplicationManager(DEFAULT_SUFFIX) # repl.create_first_master(topology_st.standalone) # # enable dynamic plugins, memberof and retro cl plugin # topology_st.standalone.backend.create(REVERSE_SUFFIX, {BACKEND_NAME: REVERSE_BACKEND_NAME}) topology_st.standalone.mappingtree.create(REVERSE_SUFFIX, bename=REVERSE_BACKEND_NAME, parent=None) > _enable_replica(topology_st.standalone, REVERSE_SUFFIX) /export/tests/tickets/ticket50232_test.py:155: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /export/tests/tickets/ticket50232_test.py:35: in _enable_replica repl._ensure_changelog(instance) /usr/local/lib/python3.8/site-packages/lib389/replica.py:1928: in _ensure_changelog cl.create(properties={ /usr/local/lib/python3.8/site-packages/lib389/_mapped_object.py:971: in create return self._create(rdn, properties, basedn, ensure=False) /usr/local/lib/python3.8/site-packages/lib389/_mapped_object.py:946: in _create self._instance.add_ext_s(e, serverctrls=self._server_controls, clientctrls=self._client_controls, escapehatch='i am sure') /usr/local/lib/python3.8/site-packages/lib389/__init__.py:176: in inner return f(ent.dn, ent.toTupleList(), *args[2:]) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:425: in add_ext_s resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in 
inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:764: in result3 resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4( /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:774: in result4 ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:340: in _ldap_call reraise(exc_type, exc_value, exc_traceback) /usr/lib64/python3.8/site-packages/ldap/compat.py:46: in reraise raise exc_value _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa46de7490> func = <built-in method result4 of LDAP object at 0x7faa46dfb480> args = (22, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E ldap.UNWILLING_TO_PERFORM: {'msgtype': 105, 'msgid': 22, 'result': 53, 'desc': 'Server is unwilling to perform', 'ctrls': [], 'info': 'Changelog configuration is part of the backend configuration'} /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:324: UNWILLING_TO_PERFORM -------------------------------Captured log call-------------------------------- [32mINFO [0m 
lib389:backend.py:80 List backend with suffix=o=reverse [32mINFO [0m lib389:backend.py:290 Creating a local backend [32mINFO [0m lib389:backend.py:76 List backend cn=reverse,cn=ldbm database,cn=plugins,cn=config [32mINFO [0m lib389:__init__.py:1713 Found entry dn: cn=reverse,cn=ldbm database,cn=plugins,cn=config cn: reverse nsslapd-cachememsize: 512000 nsslapd-cachesize: -1 nsslapd-directory: /var/lib/dirsrv/slapd-standalone1/db/reverse nsslapd-dncachememsize: 16777216 nsslapd-readonly: off nsslapd-require-index: off nsslapd-require-internalop-index: off nsslapd-suffix: o=reverse objectClass: top objectClass: extensibleObject objectClass: nsBackendInstance [32mINFO [0m lib389:mappingTree.py:154 Entry dn: cn="o=reverse",cn=mapping tree,cn=config cn: o=reverse nsslapd-backend: reverse nsslapd-state: backend objectclass: top objectclass: extensibleObject objectclass: nsMappingTree [32mINFO [0m lib389:__init__.py:1713 Found entry dn: cn=o\3Dreverse,cn=mapping tree,cn=config cn: o=reverse nsslapd-backend: reverse nsslapd-state: backend objectClass: top objectClass: extensibleObject objectClass: nsMappingTree | |||
Failed | tickets/ticket548_test.py::test_ticket548_test_with_no_policy | 0.09 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7faa4757fb20> def test_ticket548_test_with_no_policy(topology_st): """ Check shadowAccount under no password policy """ log.info("Case 1. No password policy") log.info("Bind as %s" % DN_DM) topology_st.standalone.simple_bind_s(DN_DM, PASSWORD) log.info('Add an entry' + USER1_DN) try: topology_st.standalone.add_s( Entry((USER1_DN, {'objectclass': "top person organizationalPerson inetOrgPerson shadowAccount".split(), 'sn': '1', 'cn': 'user 1', 'uid': 'user1', 'givenname': 'user', 'mail': 'user1@' + DEFAULT_SUFFIX, 'userpassword': USER_PW}))) except ldap.LDAPError as e: log.fatal('test_ticket548: Failed to add user' + USER1_DN + ': error ' + e.message['desc']) assert False edate = int(time.time() / (60 * 60 * 24)) log.info('Search entry %s' % USER1_DN) log.info("Bind as %s" % USER1_DN) topology_st.standalone.simple_bind_s(USER1_DN, USER_PW) > entry = topology_st.standalone.getEntry(USER1_DN, ldap.SCOPE_BASE, "(objectclass=*)", ['shadowLastChange']) /export/tests/tickets/ticket548_test.py:211: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa4757f7f0> args = ('uid=user1,dc=example,dc=com', 0, '(objectclass=*)', ['shadowLastChange']) kwargs = {}, res = 6, restype = 101, obj = [] def getEntry(self, *args, **kwargs): """Wrapper around SimpleLDAPObject.search. It is common to just get one entry. @param - entry dn @param - search scope, in ldap.SCOPE_BASE (default), ldap.SCOPE_SUB, ldap.SCOPE_ONE @param filterstr - filterstr, default '(objectClass=*)' from SimpleLDAPObject @param attrlist - list of attributes to retrieve. eg ['cn', 'uid'] @oaram attrsonly - default None from SimpleLDAPObject eg. 
getEntry(dn, scope, filter, attributes) XXX This cannot return None """ self.log.debug("Retrieving entry with %r", [args]) if len(args) == 1 and 'scope' not in kwargs: args += (ldap.SCOPE_BASE, ) res = self.search(*args, **kwargs) restype, obj = self.result(res) # TODO: why not test restype? if not obj: > raise NoSuchEntryError("no such entry for %r", [args]) E lib389.exceptions.NoSuchEntryError: ('no such entry for %r', [('uid=user1,dc=example,dc=com', 0, '(objectclass=*)', ['shadowLastChange'])]) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:1700: NoSuchEntryError -------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Failed | tickets/ticket548_test.py::test_ticket548_test_global_policy | 0.18 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7faa4757fb20> def test_ticket548_test_global_policy(topology_st): """ Check shadowAccount with global password policy """ log.info("Case 2. Check shadowAccount with global password policy") log.info("Bind as %s" % DN_DM) topology_st.standalone.simple_bind_s(DN_DM, PASSWORD) set_global_pwpolicy(topology_st) log.info('Add an entry' + USER2_DN) try: topology_st.standalone.add_s( Entry((USER2_DN, {'objectclass': "top person organizationalPerson inetOrgPerson shadowAccount".split(), 'sn': '2', 'cn': 'user 2', 'uid': 'user2', 'givenname': 'user', 'mail': 'user2@' + DEFAULT_SUFFIX, 'userpassword': USER_PW}))) except ldap.LDAPError as e: log.fatal('test_ticket548: Failed to add user' + USER2_DN + ': error ' + e.message['desc']) assert False edate = int(time.time() / (60 * 60 * 24)) log.info("Bind as %s" % USER1_DN) topology_st.standalone.simple_bind_s(USER1_DN, USER_PW) log.info('Search entry %s' % USER1_DN) > entry = topology_st.standalone.getEntry(USER1_DN, ldap.SCOPE_BASE, "(objectclass=*)") /export/tests/tickets/ticket548_test.py:249: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa4757f7f0> args = ('uid=user1,dc=example,dc=com', 0, '(objectclass=*)'), kwargs = {} res = 15, restype = 101, obj = [] def getEntry(self, *args, **kwargs): """Wrapper around SimpleLDAPObject.search. It is common to just get one entry. @param - entry dn @param - search scope, in ldap.SCOPE_BASE (default), ldap.SCOPE_SUB, ldap.SCOPE_ONE @param filterstr - filterstr, default '(objectClass=*)' from SimpleLDAPObject @param attrlist - list of attributes to retrieve. eg ['cn', 'uid'] @oaram attrsonly - default None from SimpleLDAPObject eg. 
getEntry(dn, scope, filter, attributes) XXX This cannot return None """ self.log.debug("Retrieving entry with %r", [args]) if len(args) == 1 and 'scope' not in kwargs: args += (ldap.SCOPE_BASE, ) res = self.search(*args, **kwargs) restype, obj = self.result(res) # TODO: why not test restype? if not obj: > raise NoSuchEntryError("no such entry for %r", [args]) E lib389.exceptions.NoSuchEntryError: ('no such entry for %r', [('uid=user1,dc=example,dc=com', 0, '(objectclass=*)')]) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:1700: NoSuchEntryError | |||
Failed | tickets/ticket548_test.py::test_ticket548_test_subtree_policy | 2.24 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7faa4757fb20> user = 'uid=user3,dc=example,dc=com', passwd = 'password' newpasswd = 'password0' def update_passwd(topology_st, user, passwd, newpasswd): log.info(" Bind as {%s,%s}" % (user, passwd)) topology_st.standalone.simple_bind_s(user, passwd) try: > topology_st.standalone.modify_s(user, [(ldap.MOD_REPLACE, 'userpassword', newpasswd.encode())]) /export/tests/tickets/ticket548_test.py:160: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = ('uid=user3,dc=example,dc=com', [(2, 'userpassword', b'password0')]) kwargs = {} c_stack = [FrameInfo(frame=<frame at 0x7faa44724840, file '/usr/local/lib/python3.8/site-packages/lib389/__init__.py', line 180,...mbda>', code_context=[' self._inner_hookexec = lambda hook, methods, kwargs: hook.multicall(\n'], index=0), ...] frame = FrameInfo(frame=<frame at 0x55ebeb99b980, file '/export/tests/tickets/ticket548_test.py', line 164, code update_passwd...[" topology_st.standalone.modify_s(user, [(ldap.MOD_REPLACE, 'userpassword', newpasswd.encode())])\n"], index=0) def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. 
elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa4757f7f0> dn = 'uid=user3,dc=example,dc=com' modlist = [(2, 'userpassword', b'password0')] def modify_s(self,dn,modlist): > return self.modify_ext_s(dn,modlist,None,None) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:640: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = ('uid=user3,dc=example,dc=com', [(2, 'userpassword', b'password0')], None, None) kwargs = {} def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. 
This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa4757f7f0> dn = 'uid=user3,dc=example,dc=com' modlist = [(2, 'userpassword', b'password0')], serverctrls = None clientctrls = None def modify_ext_s(self,dn,modlist,serverctrls=None,clientctrls=None): msgid = self.modify_ext(dn,modlist,serverctrls,clientctrls) > resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:613: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = (34,), kwargs = {'all': 1, 'timeout': -1} def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 
'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa4757f7f0>, msgid = 34, all = 1 timeout = -1, resp_ctrl_classes = None def result3(self,msgid=ldap.RES_ANY,all=1,timeout=None,resp_ctrl_classes=None): > resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4( msgid,all,timeout, 
add_ctrls=0,add_intermediates=0,add_extop=0, resp_ctrl_classes=resp_ctrl_classes ) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:764: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = (34, 1, -1) kwargs = {'add_ctrls': 0, 'add_extop': 0, 'add_intermediates': 0, 'resp_ctrl_classes': None} def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: _ 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa4757f7f0>, msgid = 34, all = 1 timeout = -1, add_ctrls = 0, add_intermediates = 0, add_extop = 0 resp_ctrl_classes = None def result4(self,msgid=ldap.RES_ANY,all=1,timeout=None,add_ctrls=0,add_intermediates=0,add_extop=0,resp_ctrl_classes=None): if timeout is None: timeout = self.timeout > ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:774: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = (<built-in method result4 of LDAP object at 0x7faa47068e40>, 34, 1, -1, 0, 0, ...) kwargs = {} def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. 
elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa4757f7f0> func = <built-in method result4 of LDAP object at 0x7faa47068e40> args = (34, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: result = func(*args,**kwargs) if __debug__ and self._trace_level>=2: if func.__name__!="unbind_ext": 
diagnostic_message_success = self._l.get_option(ldap.OPT_DIAGNOSTIC_MESSAGE) finally: self._ldap_object_lock.release() except LDAPError as e: exc_type,exc_value,exc_traceback = sys.exc_info() try: if 'info' not in e.args[0] and 'errno' in e.args[0]: e.args[0]['info'] = strerror(e.args[0]['errno']) except IndexError: pass if __debug__ and self._trace_level>=2: self._trace_file.write('=> LDAPError - %s: %s\n' % (e.__class__.__name__,str(e))) try: > reraise(exc_type, exc_value, exc_traceback) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:340: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ exc_type = <class 'ldap.INSUFFICIENT_ACCESS'> exc_value = INSUFFICIENT_ACCESS({'msgtype': 103, 'msgid': 34, 'result': 50, 'desc': 'Insufficient access', 'ctrls': [], 'info': "Insufficient 'write' privilege to the 'userPassword' attribute of entry 'uid=user3,dc=example,dc=com'.\n"}) exc_traceback = <traceback object at 0x7faa46f5d980> def reraise(exc_type, exc_value, exc_traceback): """Re-raise an exception given information from sys.exc_info() Note that unlike six.reraise, this does not support replacing the traceback. All arguments must come from a single sys.exc_info() call. """ # In Python 3, all exception info is contained in one object. 
> raise exc_value /usr/lib64/python3.8/site-packages/ldap/compat.py:46: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa4757f7f0> func = <built-in method result4 of LDAP object at 0x7faa47068e40> args = (34, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E ldap.INSUFFICIENT_ACCESS: {'msgtype': 103, 'msgid': 34, 'result': 50, 'desc': 'Insufficient access', 'ctrls': [], 'info': "Insufficient 'write' privilege to the 'userPassword' attribute of entry 'uid=user3,dc=example,dc=com'.\n"} /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:324: INSUFFICIENT_ACCESS During handling of the above exception, another exception occurred: topology_st = <lib389.topologies.TopologyMain object at 0x7faa4757fb20> def test_ticket548_test_subtree_policy(topology_st): """ Check shadowAccount with subtree level password policy """ log.info("Case 3. 
Check shadowAccount with subtree level password policy") log.info("Bind as %s" % DN_DM) topology_st.standalone.simple_bind_s(DN_DM, PASSWORD) # Check the global policy values set_subtree_pwpolicy(topology_st, 2, 20, 6) log.info('Add an entry' + USER3_DN) try: topology_st.standalone.add_s( Entry((USER3_DN, {'objectclass': "top person organizationalPerson inetOrgPerson shadowAccount".split(), 'sn': '3', 'cn': 'user 3', 'uid': 'user3', 'givenname': 'user', 'mail': 'user3@' + DEFAULT_SUFFIX, 'userpassword': USER_PW}))) except ldap.LDAPError as e: log.fatal('test_ticket548: Failed to add user' + USER3_DN + ': error ' + e.message['desc']) assert False log.info('Search entry %s' % USER3_DN) entry0 = topology_st.standalone.getEntry(USER3_DN, ldap.SCOPE_BASE, "(objectclass=*)") log.info('Expecting shadowLastChange 0 since passwordMustChange is on') check_shadow_attr_value(entry0, 'shadowLastChange', 0, USER3_DN) # passwordMinAge -- 2 day check_shadow_attr_value(entry0, 'shadowMin', 2, USER3_DN) # passwordMaxAge -- 20 days check_shadow_attr_value(entry0, 'shadowMax', 20, USER3_DN) # passwordWarning -- 6 days check_shadow_attr_value(entry0, 'shadowWarning', 6, USER3_DN) log.info("Bind as %s" % USER3_DN) topology_st.standalone.simple_bind_s(USER3_DN, USER_PW) log.info('Search entry %s' % USER3_DN) try: entry1 = topology_st.standalone.getEntry(USER3_DN, ldap.SCOPE_BASE, "(objectclass=*)") except ldap.UNWILLING_TO_PERFORM: log.info('test_ticket548: Search by' + USER3_DN + ' failed by UNWILLING_TO_PERFORM as expected') except ldap.LDAPError as e: log.fatal('test_ticket548: Failed to serch user' + USER3_DN + ' by self: error ' + e.message['desc']) assert False log.info("Bind as %s and updating the password with a new one" % USER3_DN) topology_st.standalone.simple_bind_s(USER3_DN, USER_PW) # Bind as DM again, change policy log.info("Bind as %s" % DN_DM) topology_st.standalone.simple_bind_s(DN_DM, PASSWORD) set_subtree_pwpolicy(topology_st, 4, 40, 12) newpasswd = USER_PW + '0' > 
update_passwd(topology_st, USER3_DN, USER_PW, newpasswd) /export/tests/tickets/ticket548_test.py:372: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ topology_st = <lib389.topologies.TopologyMain object at 0x7faa4757fb20> user = 'uid=user3,dc=example,dc=com', passwd = 'password' newpasswd = 'password0' def update_passwd(topology_st, user, passwd, newpasswd): log.info(" Bind as {%s,%s}" % (user, passwd)) topology_st.standalone.simple_bind_s(user, passwd) try: topology_st.standalone.modify_s(user, [(ldap.MOD_REPLACE, 'userpassword', newpasswd.encode())]) except ldap.LDAPError as e: > log.fatal('test_ticket548: Failed to update the password ' + cpw + ' of user ' + user + ': error ' + e.message[ 'desc']) E NameError: name 'cpw' is not defined /export/tests/tickets/ticket548_test.py:162: NameError | |||
XFailed | suites/acl/syntax_test.py::test_aci_invalid_syntax_fail[test_targattrfilters_18] | 0.01 | |
topo = <lib389.topologies.TopologyMain object at 0x7faa5a034610> real_value = '(target = ldap:///cn=Jeff Vedder,ou=Product Development,dc=example,dc=com)(targetattr="*")(version 3.0; acl "Name of ...3123123123123123123123123123123123123123123123123123123123123123123123123123123123123123123123123123123123123123123";)' @pytest.mark.xfail(reason='https://bugzilla.redhat.com/show_bug.cgi?id=1691473') @pytest.mark.parametrize("real_value", [a[1] for a in FAILED], ids=[a[0] for a in FAILED]) def test_aci_invalid_syntax_fail(topo, real_value): """ Try to set wrong ACI syntax. :id: 83c40784-fff5-49c8-9535-7064c9c19e7e :parametrized: yes :setup: Standalone Instance :steps: 1. Create ACI 2. Try to setup the ACI with Instance :expectedresults: 1. It should pass 2. It should not pass """ domain = Domain(topo.standalone, DEFAULT_SUFFIX) with pytest.raises(ldap.INVALID_SYNTAX): > domain.add("aci", real_value) E Failed: DID NOT RAISE <class 'ldap.INVALID_SYNTAX'> suites/acl/syntax_test.py:213: Failed -------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
XFailed | suites/acl/syntax_test.py::test_aci_invalid_syntax_fail[test_targattrfilters_20] | 0.02 | |
topo = <lib389.topologies.TopologyMain object at 0x7faa5a034610> real_value = '(target = ldap:///cn=Jeff Vedder,ou=Product Development,dc=example,dc=com)(targetattr="*")(version 3.0; acl "Name of the ACI"; deny(write)userdns="ldap:///anyone";)' @pytest.mark.xfail(reason='https://bugzilla.redhat.com/show_bug.cgi?id=1691473') @pytest.mark.parametrize("real_value", [a[1] for a in FAILED], ids=[a[0] for a in FAILED]) def test_aci_invalid_syntax_fail(topo, real_value): """ Try to set wrong ACI syntax. :id: 83c40784-fff5-49c8-9535-7064c9c19e7e :parametrized: yes :setup: Standalone Instance :steps: 1. Create ACI 2. Try to setup the ACI with Instance :expectedresults: 1. It should pass 2. It should not pass """ domain = Domain(topo.standalone, DEFAULT_SUFFIX) with pytest.raises(ldap.INVALID_SYNTAX): > domain.add("aci", real_value) E Failed: DID NOT RAISE <class 'ldap.INVALID_SYNTAX'> suites/acl/syntax_test.py:213: Failed | |||
XFailed | suites/acl/syntax_test.py::test_aci_invalid_syntax_fail[test_bind_rule_set_with_more_than_three] | 0.02 | |
topo = <lib389.topologies.TopologyMain object at 0x7faa5a034610> real_value = '(target = ldap:///dc=example,dc=com)(targetattr="*")(version 3.0; acl "Name of the ACI"; deny absolute (all)userdn="ldap:////////anyone";)' @pytest.mark.xfail(reason='https://bugzilla.redhat.com/show_bug.cgi?id=1691473') @pytest.mark.parametrize("real_value", [a[1] for a in FAILED], ids=[a[0] for a in FAILED]) def test_aci_invalid_syntax_fail(topo, real_value): """ Try to set wrong ACI syntax. :id: 83c40784-fff5-49c8-9535-7064c9c19e7e :parametrized: yes :setup: Standalone Instance :steps: 1. Create ACI 2. Try to setup the ACI with Instance :expectedresults: 1. It should pass 2. It should not pass """ domain = Domain(topo.standalone, DEFAULT_SUFFIX) with pytest.raises(ldap.INVALID_SYNTAX): > domain.add("aci", real_value) E Failed: DID NOT RAISE <class 'ldap.INVALID_SYNTAX'> suites/acl/syntax_test.py:213: Failed | |||
XFailed | suites/acl/userattr_test.py::test_mod_see_also_positive[(LEVEL_3, CHILDREN)] | 0.08 | |
topo = <lib389.topologies.TopologyMain object at 0x7faa5b737a00> _add_user = None, user = 'uid=Grandparent,ou=Inheritance,dc=example,dc=com' entry = 'ou=CHILDREN,ou=PARENTS,ou=GRANDPARENTS,ou=ANCESTORS,ou=Inheritance,dc=example,dc=com' @pytest.mark.parametrize("user,entry", [ (CAN, ROLEDNACCESS), (CAN, USERDNACCESS), (CAN, GROUPDNACCESS), (CAN, LDAPURLACCESS), (CAN, ATTRNAMEACCESS), (LEVEL_0, OU_2), (LEVEL_1, ANCESTORS), (LEVEL_2, GRANDPARENTS), (LEVEL_4, OU_2), (LEVEL_4, ANCESTORS), (LEVEL_4, GRANDPARENTS), (LEVEL_4, PARENTS), (LEVEL_4, CHILDREN), pytest.param(LEVEL_3, CHILDREN, marks=pytest.mark.xfail(reason="May be some bug")), ], ids=[ "(CAN,ROLEDNACCESS)", "(CAN,USERDNACCESS)", "(CAN,GROUPDNACCESS)", "(CAN,LDAPURLACCESS)", "(CAN,ATTRNAMEACCESS)", "(LEVEL_0, OU_2)", "(LEVEL_1,ANCESTORS)", "(LEVEL_2,GRANDPARENTS)", "(LEVEL_4,OU_2)", "(LEVEL_4, ANCESTORS)", "(LEVEL_4,GRANDPARENTS)", "(LEVEL_4,PARENTS)", "(LEVEL_4,CHILDREN)", "(LEVEL_3, CHILDREN)" ]) def test_mod_see_also_positive(topo, _add_user, user, entry): """ Try to set seeAlso on entry with binding specific user, it will success as per the ACI. :id: 65745426-7a01-11e8-8ac2-8c16451d917b :parametrized: yes :setup: Standalone Instance :steps: 1. Add test entry 2. Add ACI 3. User should follow ACI role :expectedresults: 1. Entry should be added 2. Operation should succeed 3. 
Operation should succeed """ conn = UserAccount(topo.standalone, user).bind(PW_DM) > UserAccount(conn, entry).replace('seeAlso', 'cn=1') suites/acl/userattr_test.py:216: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/local/lib/python3.8/site-packages/lib389/_mapped_object.py:280: in replace self.set(key, value, action=ldap.MOD_REPLACE) /usr/local/lib/python3.8/site-packages/lib389/_mapped_object.py:446: in set return self._instance.modify_ext_s(self._dn, [(action, key, value)], /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:613: in modify_ext_s resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:764: in result3 resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4( /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:774: in result4 ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop) /usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner return f(*args, **kwargs) /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:340: in _ldap_call reraise(exc_type, exc_value, exc_traceback) /usr/lib64/python3.8/site-packages/ldap/compat.py:46: in reraise raise exc_value _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7faa59f6b700> func = <built-in method result4 of LDAP object at 0x7faa59f76cf0> args = (5, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method 
mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E ldap.INSUFFICIENT_ACCESS: {'msgtype': 103, 'msgid': 5, 'result': 50, 'desc': 'Insufficient access', 'ctrls': [], 'info': "Insufficient 'write' privilege to the 'seeAlso' attribute of entry 'ou=children,ou=parents,ou=grandparents,ou=ancestors,ou=inheritance,dc=example,dc=com'.\n"} /usr/lib64/python3.8/site-packages/ldap/ldapobject.py:324: INSUFFICIENT_ACCESS | |||
XFailed | suites/config/config_test.py::test_defaultnamingcontext_1 | 0.29 | |
topo = <lib389.topologies.TopologyMain object at 0x7faa58972550> @pytest.mark.xfail(reason="This may fail due to bug 1610234") def test_defaultnamingcontext_1(topo): """This test case should be part of function test_defaultnamingcontext Please move it back after we have a fix for bug 1610234 """ log.info("Remove the original suffix which is currently nsslapd-defaultnamingcontext" "and check nsslapd-defaultnamingcontext become empty.") """ Please remove these declarations after moving the test to function test_defaultnamingcontext """ backends = Backends(topo.standalone) test_db2 = 'test2_db' test_suffix2 = 'dc=test2,dc=com' b2 = backends.create(properties={'cn': test_db2, 'nsslapd-suffix': test_suffix2}) b2.delete() > assert topo.standalone.config.get_attr_val_utf8('nsslapd-defaultnamingcontext') == ' ' E AssertionError: assert 'dc=example,dc=com' == ' ' E Strings contain only whitespace, escaping them using repr() E - ' ' E + 'dc=example,dc=com' suites/config/config_test.py:280: AssertionError -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.config.config_test:config_test.py:268 Remove the original suffix which is currently nsslapd-defaultnamingcontextand check nsslapd-defaultnamingcontext become empty. | |||
XFailed | suites/export/export_test.py::test_dbtasks_db2ldif_with_non_accessible_ldif_file_path_output | 3.61 | |
topo = <lib389.topologies.TopologyMain object at 0x7faa5569d8e0> @pytest.mark.bz1860291 @pytest.mark.xfail(reason="bug 1860291") @pytest.mark.skipif(ds_is_older("1.3.10", "1.4.2"), reason="Not implemented") def test_dbtasks_db2ldif_with_non_accessible_ldif_file_path_output(topo): """Export with db2ldif, giving a ldif file path which can't be accessed by the user (dirsrv by default) :id: fcc63387-e650-40a7-b643-baa68c190037 :setup: Standalone Instance - entries imported in the db :steps: 1. Stop the server 2. Launch db2ldif with a non accessible ldif file path 3. check the error reported in the command output :expected results: 1. Operation successful 2. Operation properly fails 3. An clear error message is reported as output of the cli """ export_ldif = '/tmp/nonexistent/export.ldif' log.info("Stopping the instance...") topo.standalone.stop() log.info("Performing an offline export to a non accessible ldif file path - should fail and output a clear error message") expected_output="No such file or directory" > run_db2ldif_and_clear_logs(topo, topo.standalone, DEFAULT_BENAME, export_ldif, expected_output) suites/export/export_test.py:150: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ topology = <lib389.topologies.TopologyMain object at 0x7faa5569d8e0> instance = <lib389.DirSrv object at 0x7faa55759700>, backend = 'userRoot' ldif = '/tmp/nonexistent/export.ldif', output_msg = 'No such file or directory' encrypt = False, repl = False def run_db2ldif_and_clear_logs(topology, instance, backend, ldif, output_msg, encrypt=False, repl=False): args = FakeArgs() args.instance = instance.serverid args.backend = backend args.encrypted = encrypt args.replication = repl args.ldif = ldif dbtasks_db2ldif(instance, topology.logcap.log, args) log.info('checking output msg') if not topology.logcap.contains(output_msg): log.error('The output message is not the expected one') > assert False E assert False suites/export/export_test.py:36: AssertionError 
------------------------------Captured stderr call------------------------------ ldiffile: /tmp/nonexistent/export.ldif -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.utils:export_test.py:145 Stopping the instance... [32mINFO [0m lib389.utils:export_test.py:148 Performing an offline export to a non accessible ldif file path - should fail and output a clear error message [31mCRITICAL[0m LogCapture:dbtasks.py:40 db2ldif failed [32mINFO [0m lib389.utils:export_test.py:33 checking output msg [31m[1mERROR [0m lib389.utils:export_test.py:35 The output message is not the expected one | |||
XFailed | suites/healthcheck/healthcheck_test.py::test_healthcheck_unable_to_query_backend | 1.72 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7faa45b31e50> @pytest.mark.ds50873 @pytest.mark.bz1796343 @pytest.mark.skipif(ds_is_older("1.4.1"), reason="Not implemented") @pytest.mark.xfail(reason="Will fail because of bz1837315. Set proper version after bug is fixed") def test_healthcheck_unable_to_query_backend(topology_st): """Check if HealthCheck returns DSBLE0002 code :id: 716b1ff1-94bd-4780-98b8-96ff8ef21e30 :setup: Standalone instance :steps: 1. Create DS instance 2. Create a new root suffix and database 3. Disable new suffix 4. Use HealthCheck without --json option 5. Use HealthCheck with --json option :expectedresults: 1. Success 2. Success 3. Success 4. HealthCheck should return code DSBLE0002 5. HealthCheck should return code DSBLE0002 """ RET_CODE = 'DSBLE0002' NEW_SUFFIX = 'dc=test,dc=com' NEW_BACKEND = 'userData' standalone = topology_st.standalone log.info('Create new suffix') backends = Backends(standalone) backends.create(properties={ 'cn': NEW_BACKEND, 'nsslapd-suffix': NEW_SUFFIX, }) log.info('Disable the newly created suffix') mts = MappingTrees(standalone) mt_new = mts.get(NEW_SUFFIX) mt_new.replace('nsslapd-state', 'disabled') run_healthcheck_and_flush_log(topology_st, standalone, RET_CODE, json=False) run_healthcheck_and_flush_log(topology_st, standalone, RET_CODE, json=True) log.info('Enable the suffix again and check if nothing is broken') mt_new.replace('nsslapd-state', 'backend') > run_healthcheck_and_flush_log(topology_st, standalone, RET_CODE, json=False) suites/healthcheck/healthcheck_test.py:453: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ topology = <lib389.topologies.TopologyMain object at 0x7faa45b31e50> instance = <lib389.DirSrv object at 0x7faa45b31d00>, searched_code = 'DSBLE0002' json = False, searched_code2 = None, list_checks = False, list_errors = False check = None, searched_list = None def run_healthcheck_and_flush_log(topology, instance, searched_code=None, json=False, 
searched_code2=None, list_checks=False, list_errors=False, check=None, searched_list=None): args = FakeArgs() args.instance = instance.serverid args.verbose = instance.verbose args.list_errors = list_errors args.list_checks = list_checks args.check = check args.dry_run = False args.json = json log.info('Use healthcheck with --json == {} option'.format(json)) health_check_run(instance, topology.logcap.log, args) if searched_list is not None: for item in searched_list: assert topology.logcap.contains(item) log.info('Healthcheck returned searched item: %s' % item) else: > assert topology.logcap.contains(searched_code) E AssertionError: assert False E + where False = <bound method LogCapture.contains of <LogCapture (NOTSET)>>('DSBLE0002') E + where <bound method LogCapture.contains of <LogCapture (NOTSET)>> = <LogCapture (NOTSET)>.contains E + where <LogCapture (NOTSET)> = <lib389.topologies.TopologyMain object at 0x7faa45b31e50>.logcap suites/healthcheck/healthcheck_test.py:49: AssertionError -------------------------------Captured log call-------------------------------- [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... [32mINFO [0m LogCapture:health.py:99 Checking config:hr_timestamp ... [32mINFO [0m LogCapture:health.py:99 Checking config:passwordscheme ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userdata:cl_trimming ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userdata:mappingtree ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userdata:search ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userdata:virt_attrs ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:mappingtree ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:search ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... 
[32mINFO [0m LogCapture:health.py:99 Checking encryption:check_tls_version ... [32mINFO [0m LogCapture:health.py:99 Checking fschecks:file_perms ... [32mINFO [0m LogCapture:health.py:99 Checking refint:attr_indexes ... [32mINFO [0m LogCapture:health.py:99 Checking refint:update_delay ... [32mINFO [0m LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... [32mINFO [0m LogCapture:health.py:99 Checking replication:agmts_status ... [32mINFO [0m LogCapture:health.py:99 Checking replication:conflicts ... [32mINFO [0m LogCapture:health.py:99 Checking dseldif:nsstate ... [32mINFO [0m LogCapture:health.py:99 Checking tls:certificate_expiration ... [32mINFO [0m LogCapture:health.py:99 Checking logs:notes ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:119 4 Issues found! Generating report ... [32mINFO [0m LogCapture:health.py:45 [1] DS Lint Error: DSBLE0001 [32mINFO [0m LogCapture:health.py:46 -------------------------------------------------------------------------------- [32mINFO [0m LogCapture:health.py:47 Severity: MEDIUM [32mINFO [0m LogCapture:health.py:49 Check: backends:userdata:mappingtree [32mINFO [0m LogCapture:health.py:50 Affects: [32mINFO [0m LogCapture:health.py:52 -- userdata [32mINFO [0m LogCapture:health.py:53 Details: [32mINFO [0m LogCapture:health.py:54 ----------- [32mINFO [0m LogCapture:health.py:55 This backend may be missing the correct mapping tree references. Mapping Trees allow the directory server to determine which backend an operation is routed to in the abscence of other information. This is extremely important for correct functioning of LDAP ADD for example. A correct Mapping tree for this backend must contain the suffix name, the database name and be a backend type. 
IE: cn=o3Dexample,cn=mapping tree,cn=config cn: o=example nsslapd-backend: userRoot nsslapd-state: backend objectClass: top objectClass: extensibleObject objectClass: nsMappingTree [32mINFO [0m LogCapture:health.py:56 Resolution: [32mINFO [0m LogCapture:health.py:57 ----------- [32mINFO [0m LogCapture:health.py:58 Either you need to create the mapping tree, or you need to repair the related mapping tree. You will need to do this by hand by editing cn=config, or stopping the instance and editing dse.ldif. [32mINFO [0m LogCapture:health.py:45 [2] DS Lint Error: DSBLE0002 [32mINFO [0m LogCapture:health.py:46 -------------------------------------------------------------------------------- [32mINFO [0m LogCapture:health.py:47 Severity: HIGH [32mINFO [0m LogCapture:health.py:49 Check: backends:userdata:search [32mINFO [0m LogCapture:health.py:50 Affects: [32mINFO [0m LogCapture:health.py:52 -- dc=test,dc=com [32mINFO [0m LogCapture:health.py:53 Details: [32mINFO [0m LogCapture:health.py:54 ----------- [32mINFO [0m LogCapture:health.py:55 Unable to query the backend. LDAP error ({'msgtype': 101, 'msgid': 26, 'result': 1, 'desc': 'Operations error', 'ctrls': [], 'info': 'Warning: Operation attempted on a disabled node : dc=example,dc=com\n'}) [32mINFO [0m LogCapture:health.py:56 Resolution: [32mINFO [0m LogCapture:health.py:57 ----------- [32mINFO [0m LogCapture:health.py:58 Check the server's error and access logs for more information. 
[32mINFO [0m LogCapture:health.py:45 [3] DS Lint Error: DSBLE0001 [32mINFO [0m LogCapture:health.py:46 -------------------------------------------------------------------------------- [32mINFO [0m LogCapture:health.py:47 Severity: MEDIUM [32mINFO [0m LogCapture:health.py:49 Check: backends:userdata:mappingtree [32mINFO [0m LogCapture:health.py:50 Affects: [32mINFO [0m LogCapture:health.py:52 -- userdata [32mINFO [0m LogCapture:health.py:53 Details: [32mINFO [0m LogCapture:health.py:54 ----------- [32mINFO [0m LogCapture:health.py:55 This backend may be missing the correct mapping tree references. Mapping Trees allow the directory server to determine which backend an operation is routed to in the abscence of other information. This is extremely important for correct functioning of LDAP ADD for example. A correct Mapping tree for this backend must contain the suffix name, the database name and be a backend type. IE: cn=o3Dexample,cn=mapping tree,cn=config cn: o=example nsslapd-backend: userRoot nsslapd-state: backend objectClass: top objectClass: extensibleObject objectClass: nsMappingTree [32mINFO [0m LogCapture:health.py:56 Resolution: [32mINFO [0m LogCapture:health.py:57 ----------- [32mINFO [0m LogCapture:health.py:58 Either you need to create the mapping tree, or you need to repair the related mapping tree. You will need to do this by hand by editing cn=config, or stopping the instance and editing dse.ldif. 
[32mINFO [0m LogCapture:health.py:45 [4] DS Lint Error: DSBLE0002 [32mINFO [0m LogCapture:health.py:46 -------------------------------------------------------------------------------- [32mINFO [0m LogCapture:health.py:47 Severity: HIGH [32mINFO [0m LogCapture:health.py:49 Check: backends:userdata:search [32mINFO [0m LogCapture:health.py:50 Affects: [32mINFO [0m LogCapture:health.py:52 -- dc=test,dc=com [32mINFO [0m LogCapture:health.py:53 Details: [32mINFO [0m LogCapture:health.py:54 ----------- [32mINFO [0m LogCapture:health.py:55 Unable to query the backend. LDAP error ({'msgtype': 101, 'msgid': 26, 'result': 1, 'desc': 'Operations error', 'ctrls': [], 'info': 'Warning: Operation attempted on a disabled node : dc=example,dc=com\n'}) [32mINFO [0m LogCapture:health.py:56 Resolution: [32mINFO [0m LogCapture:health.py:57 ----------- [32mINFO [0m LogCapture:health.py:58 Check the server's error and access logs for more information. [32mINFO [0m LogCapture:health.py:124 ===== End Of Report (4 Issues found) ===== [32mINFO [0m LogCapture:health.py:126 [ { "dsle": "DSBLE0001", "severity": "MEDIUM", "description": "Possibly incorrect mapping tree.", "items": [ "userdata" ], "detail": "This backend may be missing the correct mapping tree references. Mapping Trees allow\nthe directory server to determine which backend an operation is routed to in the\nabscence of other information. This is extremely important for correct functioning\nof LDAP ADD for example.\n\nA correct Mapping tree for this backend must contain the suffix name, the database name\nand be a backend type. IE:\n\ncn=o3Dexample,cn=mapping tree,cn=config\ncn: o=example\nnsslapd-backend: userRoot\nnsslapd-state: backend\nobjectClass: top\nobjectClass: extensibleObject\nobjectClass: nsMappingTree\n\n", "fix": "Either you need to create the mapping tree, or you need to repair the related\nmapping tree. 
You will need to do this by hand by editing cn=config, or stopping\nthe instance and editing dse.ldif.\n", "check": "backends:userdata:mappingtree" }, { "dsle": "DSBLE0002", "severity": "HIGH", "description": "Unable to query backend.", "items": [ "dc=test,dc=com" ], "detail": "Unable to query the backend. LDAP error ({'msgtype': 101, 'msgid': 26, 'result': 1, 'desc': 'Operations error', 'ctrls': [], 'info': 'Warning: Operation attempted on a disabled node : dc=example,dc=com\\n'})", "fix": "Check the server's error and access logs for more information.", "check": "backends:userdata:search" }, { "dsle": "DSBLE0001", "severity": "MEDIUM", "description": "Possibly incorrect mapping tree.", "items": [ "userdata" ], "detail": "This backend may be missing the correct mapping tree references. Mapping Trees allow\nthe directory server to determine which backend an operation is routed to in the\nabscence of other information. This is extremely important for correct functioning\nof LDAP ADD for example.\n\nA correct Mapping tree for this backend must contain the suffix name, the database name\nand be a backend type. IE:\n\ncn=o3Dexample,cn=mapping tree,cn=config\ncn: o=example\nnsslapd-backend: userRoot\nnsslapd-state: backend\nobjectClass: top\nobjectClass: extensibleObject\nobjectClass: nsMappingTree\n\n", "fix": "Either you need to create the mapping tree, or you need to repair the related\nmapping tree. You will need to do this by hand by editing cn=config, or stopping\nthe instance and editing dse.ldif.\n", "check": "backends:userdata:mappingtree" }, { "dsle": "DSBLE0002", "severity": "HIGH", "description": "Unable to query backend.", "items": [ "dc=test,dc=com" ], "detail": "Unable to query the backend. 
LDAP error ({'msgtype': 101, 'msgid': 26, 'result': 1, 'desc': 'Operations error', 'ctrls': [], 'info': 'Warning: Operation attempted on a disabled node : dc=example,dc=com\\n'})", "fix": "Check the server's error and access logs for more information.", "check": "backends:userdata:search" } ] [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... [32mINFO [0m LogCapture:health.py:99 Checking config:hr_timestamp ... [32mINFO [0m LogCapture:health.py:99 Checking config:passwordscheme ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userdata:cl_trimming ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userdata:mappingtree ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userdata:search ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userdata:virt_attrs ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:mappingtree ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:search ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... [32mINFO [0m LogCapture:health.py:99 Checking encryption:check_tls_version ... [32mINFO [0m LogCapture:health.py:99 Checking fschecks:file_perms ... [32mINFO [0m LogCapture:health.py:99 Checking refint:attr_indexes ... [32mINFO [0m LogCapture:health.py:99 Checking refint:update_delay ... [32mINFO [0m LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... [32mINFO [0m LogCapture:health.py:99 Checking replication:agmts_status ... [32mINFO [0m LogCapture:health.py:99 Checking replication:conflicts ... [32mINFO [0m LogCapture:health.py:99 Checking dseldif:nsstate ... [32mINFO [0m LogCapture:health.py:99 Checking tls:certificate_expiration ... [32mINFO [0m LogCapture:health.py:99 Checking logs:notes ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:119 2 Issues found! 
Generating report ... [32mINFO [0m LogCapture:health.py:45 [1] DS Lint Error: DSBLE0003 [32mINFO [0m LogCapture:health.py:46 -------------------------------------------------------------------------------- [32mINFO [0m LogCapture:health.py:47 Severity: LOW [32mINFO [0m LogCapture:health.py:49 Check: backends:userdata:search [32mINFO [0m LogCapture:health.py:50 Affects: [32mINFO [0m LogCapture:health.py:52 -- dc=test,dc=com [32mINFO [0m LogCapture:health.py:53 Details: [32mINFO [0m LogCapture:health.py:54 ----------- [32mINFO [0m LogCapture:health.py:55 The backend database has not been initialized yet [32mINFO [0m LogCapture:health.py:56 Resolution: [32mINFO [0m LogCapture:health.py:57 ----------- [32mINFO [0m LogCapture:health.py:58 You need to import an LDIF file, or create the suffix entry, in order to initialize the database. [32mINFO [0m LogCapture:health.py:45 [2] DS Lint Error: DSBLE0003 [32mINFO [0m LogCapture:health.py:46 -------------------------------------------------------------------------------- [32mINFO [0m LogCapture:health.py:47 Severity: LOW [32mINFO [0m LogCapture:health.py:49 Check: backends:userdata:search [32mINFO [0m LogCapture:health.py:50 Affects: [32mINFO [0m LogCapture:health.py:52 -- dc=test,dc=com [32mINFO [0m LogCapture:health.py:53 Details: [32mINFO [0m LogCapture:health.py:54 ----------- [32mINFO [0m LogCapture:health.py:55 The backend database has not been initialized yet [32mINFO [0m LogCapture:health.py:56 Resolution: [32mINFO [0m LogCapture:health.py:57 ----------- [32mINFO [0m LogCapture:health.py:58 You need to import an LDIF file, or create the suffix entry, in order to initialize the database. [32mINFO [0m LogCapture:health.py:124 ===== End Of Report (2 Issues found) ===== | |||
XFailed | suites/replication/conflict_resolve_test.py::TestTwoMasters::test_memberof_groups | 0.00 | |
self = <tests.suites.replication.conflict_resolve_test.TestTwoMasters object at 0x7faa46579550> topology_m2 = <lib389.topologies.TopologyMain object at 0x7faa5583d820> base_m2 = <lib389.idm.nscontainer.nsContainer object at 0x7faa464eebb0> def test_memberof_groups(self, topology_m2, base_m2): """Check that conflict properly resolved for operations with memberOf and groups :id: 77f09b18-03d1-45da-940b-1ad2c2908eb3 :setup: Two master replication, test container for entries, enable plugin logging, audit log, error log for replica and access log for internal :steps: 1. Enable memberOf plugin 2. Add 30 users to m1 and wait for replication to happen 3. Pause replication 4. Create a group on m1 and m2 5. Create a group on m1 and m2, delete from m1 6. Create a group on m1, delete from m1, and create on m2, 7. Create a group on m2 and m1, delete from m1 8. Create two different groups on m2 9. Resume replication 10. Check that the entries on both masters are the same and replication is working :expectedresults: 1. It should pass 2. It should pass 3. It should pass 4. It should pass 5. It should pass 6. It should pass 7. It should pass 8. It should pass 9. It should pass 10. It should pass """ > pytest.xfail("Issue 49591 - work in progress") E _pytest.outcomes.XFailed: Issue 49591 - work in progress suites/replication/conflict_resolve_test.py:402: XFailed | |||
XFailed | suites/replication/conflict_resolve_test.py::TestTwoMasters::test_managed_entries | 0.00 | |
self = <tests.suites.replication.conflict_resolve_test.TestTwoMasters object at 0x7faa464c6220> topology_m2 = <lib389.topologies.TopologyMain object at 0x7faa5583d820> def test_managed_entries(self, topology_m2): """Check that conflict properly resolved for operations with managed entries :id: 77f09b18-03d1-45da-940b-1ad2c2908eb4 :setup: Two master replication, test container for entries, enable plugin logging, audit log, error log for replica and access log for internal :steps: 1. Create ou=managed_users and ou=managed_groups under test container 2. Configure managed entries plugin and add a template to test container 3. Add a user to m1 and wait for replication to happen 4. Pause replication 5. Create a user on m1 and m2 with a same group ID on both master 6. Create a user on m1 and m2 with a different group ID on both master 7. Resume replication 8. Check that the entries on both masters are the same and replication is working :expectedresults: 1. It should pass 2. It should pass 3. It should pass 4. It should pass 5. It should pass 6. It should pass 7. It should pass 8. It should pass """ > pytest.xfail("Issue 49591 - work in progress") E _pytest.outcomes.XFailed: Issue 49591 - work in progress suites/replication/conflict_resolve_test.py:493: XFailed | |||
XFailed | suites/replication/conflict_resolve_test.py::TestTwoMasters::test_nested_entries_with_children | 0.00 | |
self = <tests.suites.replication.conflict_resolve_test.TestTwoMasters object at 0x7faa4642fee0> topology_m2 = <lib389.topologies.TopologyMain object at 0x7faa5583d820> base_m2 = <lib389.idm.nscontainer.nsContainer object at 0x7faa4642f4c0> def test_nested_entries_with_children(self, topology_m2, base_m2): """Check that conflict properly resolved for operations with nested entries with children :id: 77f09b18-03d1-45da-940b-1ad2c2908eb5 :setup: Two master replication, test container for entries, enable plugin logging, audit log, error log for replica and access log for internal :steps: 1. Add 15 containers to m1 and wait for replication to happen 2. Pause replication 3. Create parent-child on master2 and master1 4. Create parent-child on master1 and master2 5. Create parent-child on master1 and master2 different child rdn 6. Create parent-child on master1 and delete parent on master2 7. Create parent on master1, delete it and parent-child on master2, delete them 8. Create parent on master1, delete it and parent-two children on master2 9. Create parent-two children on master1 and parent-child on master2, delete them 10. Create three subsets inside existing container entry, applying only part of changes on m2 11. Create more combinations of the subset with parent-child on m1 and parent on m2 12. Delete container on m1, modify user1 on m1, create parent on m2 and modify user2 on m2 13. Resume replication 14. Check that the entries on both masters are the same and replication is working :expectedresults: 1. It should pass 2. It should pass 3. It should pass 4. It should pass 5. It should pass 6. It should pass 7. It should pass 8. It should pass 9. It should pass 10. It should pass 11. It should pass 12. It should pass 13. It should pass 14. It should pass """ > pytest.xfail("Issue 49591 - work in progress") E _pytest.outcomes.XFailed: Issue 49591 - work in progress suites/replication/conflict_resolve_test.py:584: XFailed | |||
XFailed | suites/replication/conflict_resolve_test.py::TestThreeMasters::test_nested_entries | 0.00 | |
self = <tests.suites.replication.conflict_resolve_test.TestThreeMasters object at 0x7faa45e72e80> topology_m3 = <lib389.topologies.TopologyMain object at 0x7faa5b21fb80> base_m3 = <lib389.idm.nscontainer.nsContainer object at 0x7faa45d15310> def test_nested_entries(self, topology_m3, base_m3): """Check that conflict properly resolved for operations with nested entries with children :id: 77f09b18-03d1-45da-940b-1ad2c2908eb6 :setup: Three master replication, test container for entries, enable plugin logging, audit log, error log for replica and access log for internal :steps: 1. Add 15 containers to m1 and wait for replication to happen 2. Pause replication 3. Create two child entries under each of two entries 4. Create three child entries under each of three entries 5. Create two parents on m1 and m2, then on m1 - create a child and delete one parent, on m2 - delete one parent and create a child 6. Test a few more parent-child combinations with three instances 7. Resume replication 8. Check that the entries on both masters are the same and replication is working :expectedresults: 1. It should pass 2. It should pass 3. It should pass 4. It should pass 5. It should pass 6. It should pass 7. It should pass 8. It should pass """ > pytest.xfail("Issue 49591 - work in progress") E _pytest.outcomes.XFailed: Issue 49591 - work in progress suites/replication/conflict_resolve_test.py:968: XFailed -------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... 
[32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master3 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39003, 'ldap-secureport': 63703, 'server-id': 'master3', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect ae4d8c62-522a-411d-a795-367a9fed88f3 / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to 
ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 3b85979a-5cd3-4b94-b3f5-d574e846e955 / got description=ae4d8c62-522a-411d-a795-367a9fed88f3) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master3 to master1 ... [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 85d2a7bc-aff0-486c-b6b7-12a3a2888925 / got description=6584a987-9c8c-4d6b-80a0-61612ef90865) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 
85d2a7bc-aff0-486c-b6b7-12a3a2888925 / got description=6584a987-9c8c-4d6b-80a0-61612ef90865) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 85d2a7bc-aff0-486c-b6b7-12a3a2888925 / got description=6584a987-9c8c-4d6b-80a0-61612ef90865) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 85d2a7bc-aff0-486c-b6b7-12a3a2888925 / got description=6584a987-9c8c-4d6b-80a0-61612ef90865) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master3 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master3 ... [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is was created [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master3 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master3 to master2 ... [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created | |||
XFailed | suites/replication/replica_config_test.py::test_agmt_num_add[nsds5ReplicaPort-0-65535-9999999999999999999999999999999999999999999999999999999999999999999-invalid-389] | 0.11 | |
topo = <lib389.topologies.TopologyMain object at 0x7faa550e4850> attr = 'nsds5ReplicaPort', too_small = '0', too_big = '65535' overflow = '9999999999999999999999999999999999999999999999999999999999999999999' notnum = 'invalid', valid = '389' @pytest.mark.xfail(reason="Agreement validation current does not work.") @pytest.mark.parametrize("attr, too_small, too_big, overflow, notnum, valid", agmt_attrs) def test_agmt_num_add(topo, attr, too_small, too_big, overflow, notnum, valid): """Test all the number values you can set for a replica config entry :id: a8b47d4a-a089-4d70-8070-e6181209bf94 :parametrized: yes :setup: standalone instance :steps: 1. Use a value that is too small 2. Use a value that is too big 3. Use a value that overflows the int 4. Use a value with character value (not a number) 5. Use a valid value :expectedresults: 1. Add is rejected 2. Add is rejected 3. Add is rejected 4. Add is rejected 5. Add is allowed """ agmt_reset(topo) replica = replica_setup(topo) agmts = Agreements(topo.standalone, basedn=replica.dn) # Test too small perform_invalid_create(agmts, agmt_dict, attr, too_small) # Test too big > perform_invalid_create(agmts, agmt_dict, attr, too_big) suites/replication/replica_config_test.py:217: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ many = <lib389.agreement.Agreements object at 0x7faa45e7e310> properties = {'cn': 'test_agreement', 'nsDS5ReplicaBindDN': 'uid=tester', 'nsDS5ReplicaBindMethod': 'SIMPLE', 'nsDS5ReplicaHost': 'localhost.localdomain', ...} attr = 'nsds5ReplicaPort', value = '65535' def perform_invalid_create(many, properties, attr, value): my_properties = copy.deepcopy(properties) my_properties[attr] = value with pytest.raises(ldap.LDAPError) as ei: > many.create(properties=my_properties) E Failed: DID NOT RAISE <class 'ldap.LDAPError'> suites/replication/replica_config_test.py:108: Failed | |||
XFailed | suites/replication/replica_config_test.py::test_agmt_num_modify[nsds5ReplicaPort-0-65535-9999999999999999999999999999999999999999999999999999999999999999999-invalid-389] | 0.23 | |
topo = <lib389.topologies.TopologyMain object at 0x7faa550e4850> attr = 'nsds5ReplicaPort', too_small = '0', too_big = '65535' overflow = '9999999999999999999999999999999999999999999999999999999999999999999' notnum = 'invalid', valid = '389' @pytest.mark.xfail(reason="Agreement validation current does not work.") @pytest.mark.parametrize("attr, too_small, too_big, overflow, notnum, valid", agmt_attrs) def test_agmt_num_modify(topo, attr, too_small, too_big, overflow, notnum, valid): """Test all the number values you can set for a replica config entry :id: a8b47d4a-a089-4d70-8070-e6181209bf95 :parametrized: yes :setup: standalone instance :steps: 1. Replace a value that is too small 2. Replace a value that is too big 3. Replace a value that overflows the int 4. Replace a value with character value (not a number) 5. Replace a vlue with a valid value :expectedresults: 1. Value is rejected 2. Value is rejected 3. Value is rejected 4. Value is rejected 5. Value is allowed """ agmt = agmt_setup(topo) # Value too small > perform_invalid_modify(agmt, attr, too_small) suites/replication/replica_config_test.py:253: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ o = <lib389.agreement.Agreement object at 0x7faa548e0190> attr = 'nsds5ReplicaPort', value = '0' def perform_invalid_modify(o, attr, value): with pytest.raises(ldap.LDAPError) as ei: > o.replace(attr, value) E Failed: DID NOT RAISE <class 'ldap.LDAPError'> suites/replication/replica_config_test.py:113: Failed | |||
XFailed | suites/replication/replica_config_test.py::test_agmt_num_modify[nsds5ReplicaTimeout--1-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-6] | 0.26 | |
topo = <lib389.topologies.TopologyMain object at 0x7faa550e4850> attr = 'nsds5ReplicaTimeout', too_small = '-1', too_big = '9223372036854775807' overflow = '9999999999999999999999999999999999999999999999999999999999999999999' notnum = 'invalid', valid = '6' @pytest.mark.xfail(reason="Agreement validation current does not work.") @pytest.mark.parametrize("attr, too_small, too_big, overflow, notnum, valid", agmt_attrs) def test_agmt_num_modify(topo, attr, too_small, too_big, overflow, notnum, valid): """Test all the number values you can set for a replica config entry :id: a8b47d4a-a089-4d70-8070-e6181209bf95 :parametrized: yes :setup: standalone instance :steps: 1. Replace a value that is too small 2. Replace a value that is too big 3. Replace a value that overflows the int 4. Replace a value with character value (not a number) 5. Replace a vlue with a valid value :expectedresults: 1. Value is rejected 2. Value is rejected 3. Value is rejected 4. Value is rejected 5. Value is allowed """ agmt = agmt_setup(topo) # Value too small perform_invalid_modify(agmt, attr, too_small) # Value too big > perform_invalid_modify(agmt, attr, too_big) suites/replication/replica_config_test.py:255: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ o = <lib389.agreement.Agreement object at 0x7faa46305910> attr = 'nsds5ReplicaTimeout', value = '9223372036854775807' def perform_invalid_modify(o, attr, value): with pytest.raises(ldap.LDAPError) as ei: > o.replace(attr, value) E Failed: DID NOT RAISE <class 'ldap.LDAPError'> suites/replication/replica_config_test.py:113: Failed | |||
XFailed | suites/replication/replica_config_test.py::test_agmt_num_modify[nsds5ReplicaBusyWaitTime--1-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-6] | 0.24 | |
topo = <lib389.topologies.TopologyMain object at 0x7faa550e4850> attr = 'nsds5ReplicaBusyWaitTime', too_small = '-1' too_big = '9223372036854775807' overflow = '9999999999999999999999999999999999999999999999999999999999999999999' notnum = 'invalid', valid = '6' @pytest.mark.xfail(reason="Agreement validation current does not work.") @pytest.mark.parametrize("attr, too_small, too_big, overflow, notnum, valid", agmt_attrs) def test_agmt_num_modify(topo, attr, too_small, too_big, overflow, notnum, valid): """Test all the number values you can set for a replica config entry :id: a8b47d4a-a089-4d70-8070-e6181209bf95 :parametrized: yes :setup: standalone instance :steps: 1. Replace a value that is too small 2. Replace a value that is too big 3. Replace a value that overflows the int 4. Replace a value with character value (not a number) 5. Replace a vlue with a valid value :expectedresults: 1. Value is rejected 2. Value is rejected 3. Value is rejected 4. Value is rejected 5. Value is allowed """ agmt = agmt_setup(topo) # Value too small perform_invalid_modify(agmt, attr, too_small) # Value too big > perform_invalid_modify(agmt, attr, too_big) suites/replication/replica_config_test.py:255: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ o = <lib389.agreement.Agreement object at 0x7faa46300580> attr = 'nsds5ReplicaBusyWaitTime', value = '9223372036854775807' def perform_invalid_modify(o, attr, value): with pytest.raises(ldap.LDAPError) as ei: > o.replace(attr, value) E Failed: DID NOT RAISE <class 'ldap.LDAPError'> suites/replication/replica_config_test.py:113: Failed | |||
XFailed | suites/replication/replica_config_test.py::test_agmt_num_modify[nsds5ReplicaSessionPauseTime--1-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-6] | 0.25 | |
topo = <lib389.topologies.TopologyMain object at 0x7faa550e4850> attr = 'nsds5ReplicaSessionPauseTime', too_small = '-1' too_big = '9223372036854775807' overflow = '9999999999999999999999999999999999999999999999999999999999999999999' notnum = 'invalid', valid = '6' @pytest.mark.xfail(reason="Agreement validation current does not work.") @pytest.mark.parametrize("attr, too_small, too_big, overflow, notnum, valid", agmt_attrs) def test_agmt_num_modify(topo, attr, too_small, too_big, overflow, notnum, valid): """Test all the number values you can set for a replica config entry :id: a8b47d4a-a089-4d70-8070-e6181209bf95 :parametrized: yes :setup: standalone instance :steps: 1. Replace a value that is too small 2. Replace a value that is too big 3. Replace a value that overflows the int 4. Replace a value with character value (not a number) 5. Replace a vlue with a valid value :expectedresults: 1. Value is rejected 2. Value is rejected 3. Value is rejected 4. Value is rejected 5. Value is allowed """ agmt = agmt_setup(topo) # Value too small perform_invalid_modify(agmt, attr, too_small) # Value too big > perform_invalid_modify(agmt, attr, too_big) suites/replication/replica_config_test.py:255: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ o = <lib389.agreement.Agreement object at 0x7faa463005b0> attr = 'nsds5ReplicaSessionPauseTime', value = '9223372036854775807' def perform_invalid_modify(o, attr, value): with pytest.raises(ldap.LDAPError) as ei: > o.replace(attr, value) E Failed: DID NOT RAISE <class 'ldap.LDAPError'> suites/replication/replica_config_test.py:113: Failed | |||
XFailed | suites/replication/replica_config_test.py::test_agmt_num_modify[nsds5ReplicaFlowControlWindow--1-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-6] | 0.23 | |
topo = <lib389.topologies.TopologyMain object at 0x7faa550e4850> attr = 'nsds5ReplicaFlowControlWindow', too_small = '-1' too_big = '9223372036854775807' overflow = '9999999999999999999999999999999999999999999999999999999999999999999' notnum = 'invalid', valid = '6' @pytest.mark.xfail(reason="Agreement validation current does not work.") @pytest.mark.parametrize("attr, too_small, too_big, overflow, notnum, valid", agmt_attrs) def test_agmt_num_modify(topo, attr, too_small, too_big, overflow, notnum, valid): """Test all the number values you can set for a replica config entry :id: a8b47d4a-a089-4d70-8070-e6181209bf95 :parametrized: yes :setup: standalone instance :steps: 1. Replace a value that is too small 2. Replace a value that is too big 3. Replace a value that overflows the int 4. Replace a value with character value (not a number) 5. Replace a vlue with a valid value :expectedresults: 1. Value is rejected 2. Value is rejected 3. Value is rejected 4. Value is rejected 5. Value is allowed """ agmt = agmt_setup(topo) # Value too small perform_invalid_modify(agmt, attr, too_small) # Value too big > perform_invalid_modify(agmt, attr, too_big) suites/replication/replica_config_test.py:255: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ o = <lib389.agreement.Agreement object at 0x7faa46339f70> attr = 'nsds5ReplicaFlowControlWindow', value = '9223372036854775807' def perform_invalid_modify(o, attr, value): with pytest.raises(ldap.LDAPError) as ei: > o.replace(attr, value) E Failed: DID NOT RAISE <class 'ldap.LDAPError'> suites/replication/replica_config_test.py:113: Failed | |||
XFailed | suites/replication/replica_config_test.py::test_agmt_num_modify[nsds5ReplicaFlowControlPause--1-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-6] | 0.23 | |
topo = <lib389.topologies.TopologyMain object at 0x7faa550e4850> attr = 'nsds5ReplicaFlowControlPause', too_small = '-1' too_big = '9223372036854775807' overflow = '9999999999999999999999999999999999999999999999999999999999999999999' notnum = 'invalid', valid = '6' @pytest.mark.xfail(reason="Agreement validation current does not work.") @pytest.mark.parametrize("attr, too_small, too_big, overflow, notnum, valid", agmt_attrs) def test_agmt_num_modify(topo, attr, too_small, too_big, overflow, notnum, valid): """Test all the number values you can set for a replica config entry :id: a8b47d4a-a089-4d70-8070-e6181209bf95 :parametrized: yes :setup: standalone instance :steps: 1. Replace a value that is too small 2. Replace a value that is too big 3. Replace a value that overflows the int 4. Replace a value with character value (not a number) 5. Replace a vlue with a valid value :expectedresults: 1. Value is rejected 2. Value is rejected 3. Value is rejected 4. Value is rejected 5. Value is allowed """ agmt = agmt_setup(topo) # Value too small perform_invalid_modify(agmt, attr, too_small) # Value too big > perform_invalid_modify(agmt, attr, too_big) suites/replication/replica_config_test.py:255: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ o = <lib389.agreement.Agreement object at 0x7faa46443eb0> attr = 'nsds5ReplicaFlowControlPause', value = '9223372036854775807' def perform_invalid_modify(o, attr, value): with pytest.raises(ldap.LDAPError) as ei: > o.replace(attr, value) E Failed: DID NOT RAISE <class 'ldap.LDAPError'> suites/replication/replica_config_test.py:113: Failed | |||
XFailed | suites/replication/replica_config_test.py::test_agmt_num_modify[nsds5ReplicaProtocolTimeout--1-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-6] | 0.22 | |
topo = <lib389.topologies.TopologyMain object at 0x7faa550e4850> attr = 'nsds5ReplicaProtocolTimeout', too_small = '-1' too_big = '9223372036854775807' overflow = '9999999999999999999999999999999999999999999999999999999999999999999' notnum = 'invalid', valid = '6' @pytest.mark.xfail(reason="Agreement validation current does not work.") @pytest.mark.parametrize("attr, too_small, too_big, overflow, notnum, valid", agmt_attrs) def test_agmt_num_modify(topo, attr, too_small, too_big, overflow, notnum, valid): """Test all the number values you can set for a replica config entry :id: a8b47d4a-a089-4d70-8070-e6181209bf95 :parametrized: yes :setup: standalone instance :steps: 1. Replace a value that is too small 2. Replace a value that is too big 3. Replace a value that overflows the int 4. Replace a value with character value (not a number) 5. Replace a vlue with a valid value :expectedresults: 1. Value is rejected 2. Value is rejected 3. Value is rejected 4. Value is rejected 5. Value is allowed """ agmt = agmt_setup(topo) # Value too small perform_invalid_modify(agmt, attr, too_small) # Value too big > perform_invalid_modify(agmt, attr, too_big) suites/replication/replica_config_test.py:255: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ o = <lib389.agreement.Agreement object at 0x7faa4646dc10> attr = 'nsds5ReplicaProtocolTimeout', value = '9223372036854775807' def perform_invalid_modify(o, attr, value): with pytest.raises(ldap.LDAPError) as ei: > o.replace(attr, value) E Failed: DID NOT RAISE <class 'ldap.LDAPError'> suites/replication/replica_config_test.py:113: Failed | |||
XFailed | suites/replication/ruvstore_test.py::test_memoryruv_sync_with_databaseruv | 0.07 | |
topo = <lib389.topologies.TopologyMain object at 0x7faa5b198640> @pytest.mark.xfail(reason="No method to safety access DB ruv currently exists online.") def test_memoryruv_sync_with_databaseruv(topo): """Check if memory ruv and database ruv are synced :id: 5f38ac5f-6353-460d-bf60-49cafffda5b3 :setup: Replication with two masters. :steps: 1. Add user to server and compare memory ruv and database ruv. 2. Modify description of user and compare memory ruv and database ruv. 3. Modrdn of user and compare memory ruv and database ruv. 4. Delete user and compare memory ruv and database ruv. :expectedresults: 1. For add user, the memory ruv and database ruv should be the same. 2. For modify operation, the memory ruv and database ruv should be the same. 3. For modrdn operation, the memory ruv and database ruv should be the same. 4. For delete operation, the memory ruv and database ruv should be the same. """ log.info('Adding user: {} to master1'.format(TEST_ENTRY_NAME)) users = UserAccounts(topo.ms['master1'], DEFAULT_SUFFIX) tuser = users.create(properties=USER_PROPERTIES) > _compare_memoryruv_and_databaseruv(topo, 'add') suites/replication/ruvstore_test.py:139: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ topo = <lib389.topologies.TopologyMain object at 0x7faa5b198640> operation_type = 'add' def _compare_memoryruv_and_databaseruv(topo, operation_type): """Compare the memoryruv and databaseruv for ldap operations""" log.info('Checking memory ruv for ldap: {} operation'.format(operation_type)) replicas = Replicas(topo.ms['master1']) replica = replicas.list()[0] memory_ruv = replica.get_attr_val_utf8('nsds50ruv') log.info('Checking database ruv for ldap: {} operation'.format(operation_type)) > entry = replicas.get_ruv_entry(DEFAULT_SUFFIX) E AttributeError: 'Replicas' object has no attribute 'get_ruv_entry' suites/replication/ruvstore_test.py:81: AttributeError -------------------------------Captured log call-------------------------------- 
[32mINFO [0m tests.suites.replication.ruvstore_test:ruvstore_test.py:136 Adding user: rep2lusr to master1 [32mINFO [0m tests.suites.replication.ruvstore_test:ruvstore_test.py:75 Checking memory ruv for ldap: add operation [32mINFO [0m tests.suites.replication.ruvstore_test:ruvstore_test.py:80 Checking database ruv for ldap: add operation | |||
XPassed | suites/acl/syntax_test.py::test_aci_invalid_syntax_fail[test_Use_double_equal_instead_of_equal_in_the_targetattr] | 0.05 | |
No log output captured. | |||
XPassed | suites/acl/syntax_test.py::test_aci_invalid_syntax_fail[test_Use_double_equal_instead_of_equal_in_the_targetfilter] | 0.03 | |
No log output captured. | |||
XPassed | suites/replication/replica_config_test.py::test_agmt_num_add[nsds5ReplicaTimeout--1-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-6] | 0.25 | |
No log output captured. | |||
XPassed | suites/replication/replica_config_test.py::test_agmt_num_add[nsds5ReplicaBusyWaitTime--1-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-6] | 0.28 | |
No log output captured. | |||
XPassed | suites/replication/replica_config_test.py::test_agmt_num_add[nsds5ReplicaSessionPauseTime--1-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-6] | 0.28 | |
No log output captured. | |||
XPassed | suites/replication/replica_config_test.py::test_agmt_num_add[nsds5ReplicaFlowControlWindow--1-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-6] | 0.27 | |
No log output captured. | |||
XPassed | suites/replication/replica_config_test.py::test_agmt_num_add[nsds5ReplicaFlowControlPause--1-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-6] | 0.26 | |
No log output captured. | |||
XPassed | suites/replication/replica_config_test.py::test_agmt_num_add[nsds5ReplicaProtocolTimeout--1-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-6] | 0.27 | |
No log output captured. | |||
Skipped | suites/auth_token/basic_auth_test.py::test_ldap_auth_token_config::setup | 0.00 | |
('suites/auth_token/basic_auth_test.py', 28, 'Skipped: Auth tokens are not available in older versions') | |||
Skipped | suites/auth_token/basic_auth_test.py::test_ldap_auth_token_nsuser::setup | 0.00 | |
('suites/auth_token/basic_auth_test.py', 75, 'Skipped: Auth tokens are not available in older versions') | |||
Skipped | suites/auth_token/basic_auth_test.py::test_ldap_auth_token_disabled::setup | 0.00 | |
('suites/auth_token/basic_auth_test.py', 144, 'Skipped: Auth tokens are not available in older versions') | |||
Skipped | suites/auth_token/basic_auth_test.py::test_ldap_auth_token_directory_manager::setup | 0.00 | |
('suites/auth_token/basic_auth_test.py', 194, 'Skipped: Auth tokens are not available in older versions') | |||
Skipped | suites/auth_token/basic_auth_test.py::test_ldap_auth_token_anonymous::setup | 0.00 | |
('suites/auth_token/basic_auth_test.py', 217, 'Skipped: Auth tokens are not available in older versions') | |||
Skipped | suites/config/regression_test.py::test_set_cachememsize_to_custom_value::setup | 0.00 | |
('suites/config/regression_test.py', 34, 'Skipped: available memory is too low') | |||
Skipped | suites/ds_logs/ds_logs_test.py::test_etime_at_border_of_second::setup | 0.00 | |
('suites/ds_logs/ds_logs_test.py', 736, 'Skipped: rsearch was removed') | |||
Skipped | suites/entryuuid/basic_test.py::test_entryuuid_indexed_import_and_search::setup | 0.00 | |
('suites/entryuuid/basic_test.py', 73, 'Skipped: Entryuuid is not available in older versions') | |||
Skipped | suites/entryuuid/basic_test.py::test_entryuuid_unindexed_import_and_search::setup | 0.00 | |
('suites/entryuuid/basic_test.py', 113, 'Skipped: Entryuuid is not available in older versions') | |||
Skipped | suites/entryuuid/basic_test.py::test_entryuuid_generation_on_add::setup | 0.00 | |
('suites/entryuuid/basic_test.py', 155, 'Skipped: Entryuuid is not available in older versions') | |||
Skipped | suites/entryuuid/basic_test.py::test_entryuuid_fixup_task::setup | 0.00 | |
('suites/entryuuid/basic_test.py', 179, 'Skipped: Entryuuid is not available in older versions') | |||
Skipped | suites/memory_leaks/MMR_double_free_test.py::test_MMR_double_free::setup | 0.00 | |
('suites/memory_leaks/MMR_double_free_test.py', 67, "Skipped: Don't run if ASAN is not enabled") | |||
Skipped | suites/memory_leaks/range_search_test.py::test_range_search::setup | 0.00 | |
('suites/memory_leaks/range_search_test.py', 24, "Skipped: Don't run if ASAN is not enabled") | |||
Skipped | suites/migration/export_data_test.py::test_export_data_from_source_host::setup | 0.00 | |
('suites/migration/export_data_test.py', 24, 'Skipped: This test is meant to execute in specific test environment') | |||
Skipped | suites/migration/import_data_test.py::test_import_data_to_target_host::setup | 0.00 | |
('suites/migration/import_data_test.py', 24, 'Skipped: This test is meant to execute in specific test environment') | |||
Skipped | suites/replication/changelog_test.py::test_cldump_files_removed::setup | 0.00 | |
('suites/replication/changelog_test.py', 235, 'Skipped: does not work for prefix builds') | |||
Skipped | suites/replication/changelog_test.py::test_changelog_compactdbinterval::setup | 0.00 | |
('suites/replication/changelog_test.py', 630, 'Skipped: changelog compaction is done by the backend itself, with id2entry as well, nsslapd-changelogcompactdb-interval is no longer supported') | |||
Skipped | suites/rewriters/adfilter_test.py::test_adfilter_objectSid::setup | 0.00 | |
('suites/rewriters/adfilter_test.py', 90, 'Skipped: It is missing samba python bindings') | |||
Skipped | tickets/ticket47462_test.py::test_ticket47462::setup | 0.00 | |
('tickets/ticket47462_test.py', 39, 'Skipped: Upgrade scripts are supported only on versions < 1.4.x') | |||
Skipped | tickets/ticket47815_test.py::test_ticket47815::setup | 0.00 | |
('tickets/ticket47815_test.py', 26, 'Skipped: Not implemented, or invalid by nsMemberOf') | |||
Skipped | tickets/ticket49121_test.py::test_ticket49121::setup | 0.00 | |
('tickets/ticket49121_test.py', 32, "Skipped: Don't run if ASAN is not enabled") | |||
Passed | suites/acl/acivattr_test.py::test_positive[(ENG_USER, ENG_MANAGER, REAL_EQ_ACI)] | 0.08 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/acl/acivattr_test.py::test_positive[(ENG_USER, ENG_MANAGER, REAL_PRES_ACI)] | 0.07 | |
No log output captured. | |||
Passed | suites/acl/acivattr_test.py::test_positive[(ENG_USER, ENG_MANAGER, REAL_SUB_ACI)] | 0.07 | |
No log output captured. | |||
Passed | suites/acl/acivattr_test.py::test_positive[(ENG_USER, ENG_MANAGER, ROLE_PRES_ACI)] | 0.06 | |
No log output captured. | |||
Passed | suites/acl/acivattr_test.py::test_positive[(ENG_USER, ENG_MANAGER, ROLE_SUB_ACI)] | 0.06 | |
No log output captured. | |||
Passed | suites/acl/acivattr_test.py::test_positive[(ENG_USER, ENG_MANAGER, COS_EQ_ACI)] | 0.11 | |
No log output captured. | |||
Passed | suites/acl/acivattr_test.py::test_positive[(ENG_USER, ENG_MANAGER, COS_PRES_ACI)] | 0.08 | |
No log output captured. | |||
Passed | suites/acl/acivattr_test.py::test_positive[(ENG_USER, ENG_MANAGER, COS_SUB_ACI)] | 0.09 | |
No log output captured. | |||
Passed | suites/acl/acivattr_test.py::test_positive[(ENG_USER, ENG_MANAGER, LDAPURL_ACI)] | 0.11 | |
No log output captured. | |||
Passed | suites/acl/acivattr_test.py::test_negative[(ENG_USER, SALES_MANAGER, REAL_EQ_ACI)] | 0.10 | |
No log output captured. | |||
Passed | suites/acl/acivattr_test.py::test_negative[(ENG_USER, SALES_OU, REAL_PRES_ACI)] | 0.11 | |
No log output captured. | |||
Passed | suites/acl/acivattr_test.py::test_negative[(ENG_USER, SALES_MANAGER, REAL_SUB_ACI)] | 0.10 | |
No log output captured. | |||
Passed | suites/acl/acivattr_test.py::test_negative[(ENG_USER, SALES_MANAGER, ROLE_EQ_ACI)] | 0.10 | |
No log output captured. | |||
Passed | suites/acl/acivattr_test.py::test_negative[(ENG_USER, SALES_MANAGER, ROLE_PRES_ACI)] | 0.07 | |
No log output captured. | |||
Passed | suites/acl/acivattr_test.py::test_negative[(ENG_USER, SALES_MANAGER, ROLE_SUB_ACI)] | 0.09 | |
No log output captured. | |||
Passed | suites/acl/acivattr_test.py::test_negative[(ENG_USER, SALES_MANAGER, COS_EQ_ACI)] | 0.23 | |
No log output captured. | |||
Passed | suites/acl/acivattr_test.py::test_negative[(ENG_USER, SALES_MANAGER, COS_PRES_ACI)] | 0.07 | |
No log output captured. | |||
Passed | suites/acl/acivattr_test.py::test_negative[(ENG_USER, SALES_MANAGER, COS_SUB_ACI)] | 0.07 | |
No log output captured. | |||
Passed | suites/acl/acivattr_test.py::test_negative[(SALES_UESER, SALES_MANAGER, LDAPURL_ACI)] | 0.08 | |
No log output captured. | |||
Passed | suites/acl/acivattr_test.py::test_negative[(ENG_USER, ENG_MANAGER, ROLE_EQ_ACI)] | 0.07 | |
No log output captured. | |||
Passed | suites/acl/acl_deny_test.py::test_multi_deny_aci | 12.87 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389:acl_deny_test.py:47 Add uid=tuser1,ou=People,dc=example,dc=com [32mINFO [0m lib389:acl_deny_test.py:58 Add uid=tuser,ou=People,dc=example,dc=com -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:acl_deny_test.py:90 Pass 1 [32mINFO [0m lib389:acl_deny_test.py:93 Testing two searches behave the same... [32mINFO [0m lib389:acl_deny_test.py:136 Testing search does not return any entries... [32mINFO [0m lib389:acl_deny_test.py:90 Pass 2 [32mINFO [0m lib389:acl_deny_test.py:93 Testing two searches behave the same... [32mINFO [0m lib389:acl_deny_test.py:136 Testing search does not return any entries... [32mINFO [0m lib389:acl_deny_test.py:200 Test PASSED | |||
Passed | suites/acl/acl_test.py::test_aci_attr_subtype_targetattr[lang-ja] | 0.01 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect a1fa3c01-fdb4-4521-b88b-79f3b8069be8 / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 
Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 999b04a0-68ec-4740-918f-60b8a6055827 / got description=a1fa3c01-fdb4-4521-b88b-79f3b8069be8) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists [32mINFO [0m tests.suites.acl.acl_test:acl_test.py:76 ========Executing test with 'lang-ja' subtype======== [32mINFO [0m tests.suites.acl.acl_test:acl_test.py:77 Add a target attribute [32mINFO [0m tests.suites.acl.acl_test:acl_test.py:80 Add a user attribute [32mINFO [0m tests.suites.acl.acl_test:acl_test.py:88 Add an ACI with attribute subtype -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.acl.acl_test:acl_test.py:118 Search for the added attribute [32mINFO [0m tests.suites.acl.acl_test:acl_test.py:125 The added attribute was found | |||
Passed | suites/acl/acl_test.py::test_aci_attr_subtype_targetattr[binary] | 0.01 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m tests.suites.acl.acl_test:acl_test.py:76 ========Executing test with 'binary' subtype======== [32mINFO [0m tests.suites.acl.acl_test:acl_test.py:77 Add a target attribute [32mINFO [0m tests.suites.acl.acl_test:acl_test.py:80 Add a user attribute [32mINFO [0m tests.suites.acl.acl_test:acl_test.py:88 Add an ACI with attribute subtype -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.acl.acl_test:acl_test.py:118 Search for the added attribute [32mINFO [0m tests.suites.acl.acl_test:acl_test.py:125 The added attribute was found | |||
Passed | suites/acl/acl_test.py::test_aci_attr_subtype_targetattr[phonetic] | 0.01 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m tests.suites.acl.acl_test:acl_test.py:76 ========Executing test with 'phonetic' subtype======== [32mINFO [0m tests.suites.acl.acl_test:acl_test.py:77 Add a target attribute [32mINFO [0m tests.suites.acl.acl_test:acl_test.py:80 Add a user attribute [32mINFO [0m tests.suites.acl.acl_test:acl_test.py:88 Add an ACI with attribute subtype -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.acl.acl_test:acl_test.py:118 Search for the added attribute [32mINFO [0m tests.suites.acl.acl_test:acl_test.py:125 The added attribute was found | |||
Passed | suites/acl/acl_test.py::test_mode_default_add_deny | 0.04 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389:acl_test.py:233 ######## INITIALIZATION ######## [32mINFO [0m lib389:acl_test.py:236 Add uid=bind_entry,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:254 Add cn=staged user,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:258 Add cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:262 Add cn=excepts,cn=accounts,dc=example,dc=com -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:acl_test.py:294 ######## mode moddn_aci : ADD (should fail) ######## [32mINFO [0m lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:302 Try to add cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:311 Exception (expected): INSUFFICIENT_ACCESS | |||
Passed | suites/acl/acl_test.py::test_mode_default_delete_deny | 0.03 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:acl_test.py:329 ######## DELETE (should fail) ######## [32mINFO [0m lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:336 Try to delete cn=staged user,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:341 Exception (expected): INSUFFICIENT_ACCESS | |||
Passed | suites/acl/acl_test.py::test_moddn_staging_prod[0-cn=staged user,dc=example,dc=com-cn=accounts,dc=example,dc=com-False] | 0.33 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:acl_test.py:376 ######## MOVE staging -> Prod (0) ######## [32mINFO [0m lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:388 Try to MODDN uid=new_account0,cn=staged user,dc=example,dc=com -> uid=new_account0,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:395 Exception (expected): INSUFFICIENT_ACCESS [32mINFO [0m lib389:acl_test.py:399 ######## MOVE to and from equality filter ######## [32mINFO [0m lib389:acl_test.py:133 Bind as cn=Directory Manager [32mINFO [0m lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:406 Try to MODDN uid=new_account0,cn=staged user,dc=example,dc=com -> uid=new_account0,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:133 Bind as cn=Directory Manager [32mINFO [0m lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com | |||
Passed | suites/acl/acl_test.py::test_moddn_staging_prod[1-cn=staged user,dc=example,dc=com-cn=accounts,dc=example,dc=com-False] | 0.33 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:acl_test.py:376 ######## MOVE staging -> Prod (1) ######## [32mINFO [0m lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:388 Try to MODDN uid=new_account1,cn=staged user,dc=example,dc=com -> uid=new_account1,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:395 Exception (expected): INSUFFICIENT_ACCESS [32mINFO [0m lib389:acl_test.py:399 ######## MOVE to and from equality filter ######## [32mINFO [0m lib389:acl_test.py:133 Bind as cn=Directory Manager [32mINFO [0m lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:406 Try to MODDN uid=new_account1,cn=staged user,dc=example,dc=com -> uid=new_account1,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:133 Bind as cn=Directory Manager [32mINFO [0m lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com | |||
Passed | suites/acl/acl_test.py::test_moddn_staging_prod[2-cn=staged user,dc=example,dc=com-cn=bad*,dc=example,dc=com-True] | 0.23 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:acl_test.py:376 ######## MOVE staging -> Prod (2) ######## [32mINFO [0m lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:388 Try to MODDN uid=new_account2,cn=staged user,dc=example,dc=com -> uid=new_account2,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:395 Exception (expected): INSUFFICIENT_ACCESS [32mINFO [0m lib389:acl_test.py:399 ######## MOVE to and from equality filter ######## [32mINFO [0m lib389:acl_test.py:133 Bind as cn=Directory Manager [32mINFO [0m lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:406 Try to MODDN uid=new_account2,cn=staged user,dc=example,dc=com -> uid=new_account2,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:409 Exception (expected): INSUFFICIENT_ACCESS [32mINFO [0m lib389:acl_test.py:133 Bind as cn=Directory Manager [32mINFO [0m lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com | |||
Passed | suites/acl/acl_test.py::test_moddn_staging_prod[3-cn=st*,dc=example,dc=com-cn=accounts,dc=example,dc=com-False] | 0.21 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:acl_test.py:376 ######## MOVE staging -> Prod (3) ######## [32mINFO [0m lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:388 Try to MODDN uid=new_account3,cn=staged user,dc=example,dc=com -> uid=new_account3,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:395 Exception (expected): INSUFFICIENT_ACCESS [32mINFO [0m lib389:acl_test.py:399 ######## MOVE to and from equality filter ######## [32mINFO [0m lib389:acl_test.py:133 Bind as cn=Directory Manager [32mINFO [0m lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:406 Try to MODDN uid=new_account3,cn=staged user,dc=example,dc=com -> uid=new_account3,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:133 Bind as cn=Directory Manager [32mINFO [0m lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com | |||
Passed | suites/acl/acl_test.py::test_moddn_staging_prod[4-cn=bad*,dc=example,dc=com-cn=accounts,dc=example,dc=com-True] | 0.23 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:acl_test.py:376 ######## MOVE staging -> Prod (4) ######## [32mINFO [0m lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:388 Try to MODDN uid=new_account4,cn=staged user,dc=example,dc=com -> uid=new_account4,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:395 Exception (expected): INSUFFICIENT_ACCESS [32mINFO [0m lib389:acl_test.py:399 ######## MOVE to and from equality filter ######## [32mINFO [0m lib389:acl_test.py:133 Bind as cn=Directory Manager [32mINFO [0m lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:406 Try to MODDN uid=new_account4,cn=staged user,dc=example,dc=com -> uid=new_account4,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:409 Exception (expected): INSUFFICIENT_ACCESS [32mINFO [0m lib389:acl_test.py:133 Bind as cn=Directory Manager [32mINFO [0m lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com | |||
Passed | suites/acl/acl_test.py::test_moddn_staging_prod[5-cn=st*,dc=example,dc=com-cn=ac*,dc=example,dc=com-False] | 0.25 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:acl_test.py:376 ######## MOVE staging -> Prod (5) ######## [32mINFO [0m lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:388 Try to MODDN uid=new_account5,cn=staged user,dc=example,dc=com -> uid=new_account5,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:395 Exception (expected): INSUFFICIENT_ACCESS [32mINFO [0m lib389:acl_test.py:399 ######## MOVE to and from equality filter ######## [32mINFO [0m lib389:acl_test.py:133 Bind as cn=Directory Manager [32mINFO [0m lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:406 Try to MODDN uid=new_account5,cn=staged user,dc=example,dc=com -> uid=new_account5,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:133 Bind as cn=Directory Manager [32mINFO [0m lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com | |||
Passed | suites/acl/acl_test.py::test_moddn_staging_prod[6-None-cn=ac*,dc=example,dc=com-False] | 0.55 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:acl_test.py:376 ######## MOVE staging -> Prod (6) ######## [32mINFO [0m lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:388 Try to MODDN uid=new_account6,cn=staged user,dc=example,dc=com -> uid=new_account6,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:395 Exception (expected): INSUFFICIENT_ACCESS [32mINFO [0m lib389:acl_test.py:399 ######## MOVE to and from equality filter ######## [32mINFO [0m lib389:acl_test.py:133 Bind as cn=Directory Manager [32mINFO [0m lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:406 Try to MODDN uid=new_account6,cn=staged user,dc=example,dc=com -> uid=new_account6,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:133 Bind as cn=Directory Manager [32mINFO [0m lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com | |||
Passed | suites/acl/acl_test.py::test_moddn_staging_prod[7-cn=st*,dc=example,dc=com-None-False] | 0.27 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:acl_test.py:376 ######## MOVE staging -> Prod (7) ######## [32mINFO [0m lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:388 Try to MODDN uid=new_account7,cn=staged user,dc=example,dc=com -> uid=new_account7,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:395 Exception (expected): INSUFFICIENT_ACCESS [32mINFO [0m lib389:acl_test.py:399 ######## MOVE to and from equality filter ######## [32mINFO [0m lib389:acl_test.py:133 Bind as cn=Directory Manager [32mINFO [0m lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:406 Try to MODDN uid=new_account7,cn=staged user,dc=example,dc=com -> uid=new_account7,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:133 Bind as cn=Directory Manager [32mINFO [0m lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com | |||
Passed | suites/acl/acl_test.py::test_moddn_staging_prod[8-None-None-False] | 0.26 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:acl_test.py:376 ######## MOVE staging -> Prod (8) ######## [32mINFO [0m lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:388 Try to MODDN uid=new_account8,cn=staged user,dc=example,dc=com -> uid=new_account8,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:395 Exception (expected): INSUFFICIENT_ACCESS [32mINFO [0m lib389:acl_test.py:399 ######## MOVE to and from equality filter ######## [32mINFO [0m lib389:acl_test.py:133 Bind as cn=Directory Manager [32mINFO [0m lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:406 Try to MODDN uid=new_account8,cn=staged user,dc=example,dc=com -> uid=new_account8,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:133 Bind as cn=Directory Manager [32mINFO [0m lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com | |||
Passed | suites/acl/acl_test.py::test_moddn_staging_prod_9 | 1.04 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:acl_test.py:453 ######## MOVE staging -> Prod (9) ######## [32mINFO [0m lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:466 Try to MODDN uid=new_account9,cn=staged user,dc=example,dc=com -> uid=new_account9,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:473 Exception (expected): INSUFFICIENT_ACCESS [32mINFO [0m lib389:acl_test.py:479 Disable the moddn right [32mINFO [0m lib389:acl_test.py:133 Bind as cn=Directory Manager [32mINFO [0m lib389:acl_test.py:484 ######## MOVE to and from equality filter ######## [32mINFO [0m lib389:acl_test.py:133 Bind as cn=Directory Manager [32mINFO [0m lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:492 Try to MODDN uid=new_account9,cn=staged user,dc=example,dc=com -> uid=new_account9,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:499 Exception (expected): INSUFFICIENT_ACCESS [32mINFO [0m lib389:acl_test.py:133 Bind as cn=Directory Manager [32mINFO [0m lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:133 Bind as cn=Directory Manager [32mINFO [0m lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:521 Try to MODDN uid=new_account9,cn=staged user,dc=example,dc=com -> uid=new_account9,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:133 Bind as cn=Directory Manager [32mINFO [0m lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:532 Enable the moddn right [32mINFO [0m lib389:acl_test.py:133 Bind as cn=Directory Manager [32mINFO [0m lib389:acl_test.py:536 ######## MOVE staging -> Prod (10) ######## [32mINFO [0m lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:548 Try to MODDN uid=new_account10,cn=staged 
user,dc=example,dc=com -> uid=new_account10,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:555 Exception (expected): INSUFFICIENT_ACCESS [32mINFO [0m lib389:acl_test.py:133 Bind as cn=Directory Manager [32mINFO [0m lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:572 Try to MODDN uid=new_account10,cn=staged user,dc=example,dc=com -> uid=new_account10,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:579 Exception (expected): INSUFFICIENT_ACCESS [32mINFO [0m lib389:acl_test.py:133 Bind as cn=Directory Manager [32mINFO [0m lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:588 ######## MOVE to and from equality filter ######## [32mINFO [0m lib389:acl_test.py:133 Bind as cn=Directory Manager [32mINFO [0m lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:594 Try to MODDN uid=new_account10,cn=staged user,dc=example,dc=com -> uid=new_account10,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:133 Bind as cn=Directory Manager [32mINFO [0m lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com | |||
Passed | suites/acl/acl_test.py::test_moddn_prod_staging | 0.73 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:acl_test.py:623 ######## MOVE staging -> Prod (11) ######## [32mINFO [0m lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:636 Try to MODDN uid=new_account11,cn=staged user,dc=example,dc=com -> uid=new_account11,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:643 Exception (expected): INSUFFICIENT_ACCESS [32mINFO [0m lib389:acl_test.py:647 ######## MOVE to and from equality filter ######## [32mINFO [0m lib389:acl_test.py:133 Bind as cn=Directory Manager [32mINFO [0m lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:653 Try to MODDN uid=new_account11,cn=staged user,dc=example,dc=com -> uid=new_account11,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:133 Bind as cn=Directory Manager [32mINFO [0m lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:668 Try to move back MODDN uid=new_account11,cn=accounts,dc=example,dc=com -> uid=new_account11,cn=staged user,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:675 Exception (expected): INSUFFICIENT_ACCESS [32mINFO [0m lib389:acl_test.py:133 Bind as cn=Directory Manager [32mINFO [0m lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:133 Bind as cn=Directory Manager [32mINFO [0m lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com | |||
Passed | suites/acl/acl_test.py::test_check_repl_M2_to_M1 | 1.06 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:acl_test.py:705 Bind as cn=Directory Manager (M2) [32mINFO [0m lib389:acl_test.py:725 Update (M2) uid=new_account12,cn=staged user,dc=example,dc=com (description) [32mINFO [0m lib389:acl_test.py:738 Update uid=new_account12,cn=staged user,dc=example,dc=com (description) replicated on M1 | |||
Passed | suites/acl/acl_test.py::test_moddn_staging_prod_except | 0.48 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:acl_test.py:763 ######## MOVE staging -> Prod (13) ######## [32mINFO [0m lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:775 Try to MODDN uid=new_account13,cn=staged user,dc=example,dc=com -> uid=new_account13,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:782 Exception (expected): INSUFFICIENT_ACCESS [32mINFO [0m lib389:acl_test.py:786 ######## MOVE to and from equality filter ######## [32mINFO [0m lib389:acl_test.py:133 Bind as cn=Directory Manager [32mINFO [0m lib389:acl_test.py:160 Add a DENY aci under cn=excepts,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:793 Try to MODDN uid=new_account13,cn=staged user,dc=example,dc=com -> uid=new_account13,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:799 ######## MOVE staging -> Prod/Except (14) ######## [32mINFO [0m lib389:acl_test.py:805 Try to MODDN uid=new_account14,cn=staged user,dc=example,dc=com -> uid=new_account14,cn=excepts,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:812 Exception (expected): INSUFFICIENT_ACCESS [32mINFO [0m lib389:acl_test.py:133 Bind as cn=Directory Manager [32mINFO [0m lib389:acl_test.py:160 Add a DENY aci under cn=excepts,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com | |||
Passed | suites/acl/acl_test.py::test_mode_default_ger_no_moddn | 0.01 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:acl_test.py:839 ######## mode moddn_aci : GER no moddn ######## [32mINFO [0m lib389:acl_test.py:850 dn: cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:850 dn: cn=excepts,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:850 dn: uid=new_account0,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:850 dn: uid=new_account1,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:850 dn: uid=new_account3,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:850 dn: uid=new_account5,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:850 dn: uid=new_account6,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:850 dn: uid=new_account7,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:850 dn: uid=new_account8,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:850 dn: uid=new_account9,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:850 dn: uid=new_account10,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:850 dn: uid=new_account11,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:850 dn: uid=new_account13,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:853 ######## entryLevelRights: b'v' | |||
Passed | suites/acl/acl_test.py::test_mode_default_ger_with_moddn | 0.20 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:acl_test.py:877 ######## mode moddn_aci: GER with moddn ######## [32mINFO [0m lib389:acl_test.py:133 Bind as cn=Directory Manager [32mINFO [0m lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:895 dn: cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:895 dn: cn=excepts,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:895 dn: uid=new_account0,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:895 dn: uid=new_account1,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:895 dn: uid=new_account3,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:895 dn: uid=new_account5,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:895 dn: uid=new_account6,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:895 dn: uid=new_account7,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:895 dn: uid=new_account8,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:895 dn: uid=new_account9,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:895 dn: uid=new_account10,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:895 dn: uid=new_account11,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:895 dn: uid=new_account13,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:898 ######## entryLevelRights: b'vn' [32mINFO [0m lib389:acl_test.py:133 Bind as cn=Directory Manager [32mINFO [0m lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com | |||
Passed | suites/acl/acl_test.py::test_mode_legacy_ger_no_moddn1 | 0.05 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:acl_test.py:928 ######## Disable the moddn aci mod ######## [32mINFO [0m lib389:acl_test.py:133 Bind as cn=Directory Manager [32mINFO [0m lib389:acl_test.py:932 ######## mode legacy 1: GER no moddn ######## [32mINFO [0m lib389:acl_test.py:942 dn: cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:942 dn: cn=excepts,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:942 dn: uid=new_account0,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:942 dn: uid=new_account1,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:942 dn: uid=new_account3,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:942 dn: uid=new_account5,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:942 dn: uid=new_account6,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:942 dn: uid=new_account7,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:942 dn: uid=new_account8,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:942 dn: uid=new_account9,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:942 dn: uid=new_account10,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:942 dn: uid=new_account11,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:942 dn: uid=new_account13,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:945 ######## entryLevelRights: b'v' | |||
Passed | suites/acl/acl_test.py::test_mode_legacy_ger_no_moddn2 | 0.20 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:acl_test.py:971 ######## Disable the moddn aci mod ######## [32mINFO [0m lib389:acl_test.py:133 Bind as cn=Directory Manager [32mINFO [0m lib389:acl_test.py:975 ######## mode legacy 2: GER no moddn ######## [32mINFO [0m lib389:acl_test.py:133 Bind as cn=Directory Manager [32mINFO [0m lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:992 dn: cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:992 dn: cn=excepts,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:992 dn: uid=new_account0,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:992 dn: uid=new_account1,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:992 dn: uid=new_account3,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:992 dn: uid=new_account5,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:992 dn: uid=new_account6,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:992 dn: uid=new_account7,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:992 dn: uid=new_account8,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:992 dn: uid=new_account9,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:992 dn: uid=new_account10,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:992 dn: uid=new_account11,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:992 dn: uid=new_account13,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:995 ######## entryLevelRights: b'v' [32mINFO [0m lib389:acl_test.py:133 Bind as cn=Directory Manager [32mINFO [0m lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com | |||
Passed | suites/acl/acl_test.py::test_mode_legacy_ger_with_moddn | 0.37 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:acl_test.py:1031 ######## Disable the moddn aci mod ######## [32mINFO [0m lib389:acl_test.py:133 Bind as cn=Directory Manager [32mINFO [0m lib389:acl_test.py:1035 ######## mode legacy : GER with moddn ######## [32mINFO [0m lib389:acl_test.py:133 Bind as cn=Directory Manager [32mINFO [0m lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1057 dn: cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1057 dn: cn=excepts,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1057 dn: uid=new_account0,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1057 dn: uid=new_account1,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1057 dn: uid=new_account3,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1057 dn: uid=new_account5,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1057 dn: uid=new_account6,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1057 dn: uid=new_account7,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1057 dn: uid=new_account8,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1057 dn: uid=new_account9,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1057 dn: uid=new_account10,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1057 dn: uid=new_account11,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1057 dn: uid=new_account13,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1060 ######## entryLevelRights: b'vn' [32mINFO [0m lib389:acl_test.py:133 Bind as cn=Directory Manager | |||
Passed | suites/acl/acl_test.py::test_rdn_write_get_ger | 0.01 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389:acl_test.py:1071 ######## Add entry tuser ######## -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:acl_test.py:1097 ######## GER rights for anonymous ######## [32mINFO [0m lib389:acl_test.py:1107 dn: dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1109 ######## entryLevelRights: b'v' [32mINFO [0m lib389:acl_test.py:1107 dn: ou=groups,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1109 ######## entryLevelRights: b'v' [32mINFO [0m lib389:acl_test.py:1107 dn: ou=people,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1109 ######## entryLevelRights: b'v' [32mINFO [0m lib389:acl_test.py:1107 dn: ou=permissions,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1109 ######## entryLevelRights: b'v' [32mINFO [0m lib389:acl_test.py:1107 dn: ou=services,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1109 ######## entryLevelRights: b'v' [32mINFO [0m lib389:acl_test.py:1107 dn: uid=demo_user,ou=people,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1109 ######## entryLevelRights: b'v' [32mINFO [0m lib389:acl_test.py:1107 dn: cn=demo_group,ou=groups,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1109 ######## entryLevelRights: b'v' [32mINFO [0m lib389:acl_test.py:1107 dn: cn=group_admin,ou=permissions,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1109 ######## entryLevelRights: b'v' [32mINFO [0m lib389:acl_test.py:1107 dn: cn=group_modify,ou=permissions,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1109 ######## entryLevelRights: b'v' [32mINFO [0m lib389:acl_test.py:1107 dn: cn=user_admin,ou=permissions,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1109 ######## entryLevelRights: b'v' [32mINFO [0m lib389:acl_test.py:1107 dn: cn=user_modify,ou=permissions,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1109 ######## entryLevelRights: b'v' [32mINFO [0m lib389:acl_test.py:1107 dn: 
cn=user_passwd_reset,ou=permissions,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1109 ######## entryLevelRights: b'v' [32mINFO [0m lib389:acl_test.py:1107 dn: cn=user_private_read,ou=permissions,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1109 ######## entryLevelRights: b'v' [32mINFO [0m lib389:acl_test.py:1107 dn: cn=replication_managers,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1109 ######## entryLevelRights: b'v' [32mINFO [0m lib389:acl_test.py:1107 dn: cn=ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701,ou=services,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1109 ######## entryLevelRights: b'v' [32mINFO [0m lib389:acl_test.py:1107 dn: cn=ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702,ou=services,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1109 ######## entryLevelRights: b'v' [32mINFO [0m lib389:acl_test.py:1107 dn: uid=bind_entry,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1109 ######## entryLevelRights: b'v' [32mINFO [0m lib389:acl_test.py:1107 dn: cn=staged user,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1109 ######## entryLevelRights: b'v' [32mINFO [0m lib389:acl_test.py:1107 dn: cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1109 ######## entryLevelRights: b'v' [32mINFO [0m lib389:acl_test.py:1107 dn: cn=excepts,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1109 ######## entryLevelRights: b'v' [32mINFO [0m lib389:acl_test.py:1107 dn: uid=new_account0,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1109 ######## entryLevelRights: b'v' [32mINFO [0m lib389:acl_test.py:1107 dn: uid=new_account1,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1109 ######## entryLevelRights: b'v' [32mINFO [0m lib389:acl_test.py:1107 dn: uid=new_account2,cn=staged user,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1109 ######## entryLevelRights: b'v' [32mINFO [0m lib389:acl_test.py:1107 dn: uid=new_account3,cn=accounts,dc=example,dc=com [32mINFO [0m 
lib389:acl_test.py:1109 ######## entryLevelRights: b'v' [32mINFO [0m lib389:acl_test.py:1107 dn: uid=new_account4,cn=staged user,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1109 ######## entryLevelRights: b'v' [32mINFO [0m lib389:acl_test.py:1107 dn: uid=new_account5,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1109 ######## entryLevelRights: b'v' [32mINFO [0m lib389:acl_test.py:1107 dn: uid=new_account6,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1109 ######## entryLevelRights: b'v' [32mINFO [0m lib389:acl_test.py:1107 dn: uid=new_account7,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1109 ######## entryLevelRights: b'v' [32mINFO [0m lib389:acl_test.py:1107 dn: uid=new_account8,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1109 ######## entryLevelRights: b'v' [32mINFO [0m lib389:acl_test.py:1107 dn: uid=new_account9,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1109 ######## entryLevelRights: b'v' [32mINFO [0m lib389:acl_test.py:1107 dn: uid=new_account10,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1109 ######## entryLevelRights: b'v' [32mINFO [0m lib389:acl_test.py:1107 dn: uid=new_account11,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1109 ######## entryLevelRights: b'v' [32mINFO [0m lib389:acl_test.py:1107 dn: uid=new_account12,cn=staged user,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1109 ######## entryLevelRights: b'v' [32mINFO [0m lib389:acl_test.py:1107 dn: uid=new_account13,cn=accounts,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1109 ######## entryLevelRights: b'v' [32mINFO [0m lib389:acl_test.py:1107 dn: uid=new_account14,cn=staged user,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1109 ######## entryLevelRights: b'v' [32mINFO [0m lib389:acl_test.py:1107 dn: uid=new_account15,cn=staged user,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1109 ######## entryLevelRights: b'v' [32mINFO [0m lib389:acl_test.py:1107 dn: 
uid=new_account16,cn=staged user,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1109 ######## entryLevelRights: b'v' [32mINFO [0m lib389:acl_test.py:1107 dn: uid=new_account17,cn=staged user,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1109 ######## entryLevelRights: b'v' [32mINFO [0m lib389:acl_test.py:1107 dn: uid=new_account18,cn=staged user,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1109 ######## entryLevelRights: b'v' [32mINFO [0m lib389:acl_test.py:1107 dn: uid=new_account19,cn=staged user,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1109 ######## entryLevelRights: b'v' [32mINFO [0m lib389:acl_test.py:1107 dn: cn=tuser,dc=example,dc=com [32mINFO [0m lib389:acl_test.py:1109 ######## entryLevelRights: b'v' | |||
Passed | suites/acl/acl_test.py::test_rdn_write_modrdn_anonymous | 0.07 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:acl_test.py:1136 dn: [32mINFO [0m lib389:acl_test.py:1138 ######## 'objectClass': [b'top'] [32mINFO [0m lib389:acl_test.py:1138 ######## 'defaultnamingcontext': [b'dc=example,dc=com'] [32mINFO [0m lib389:acl_test.py:1138 ######## 'dataversion': [b'020201110002457'] [32mINFO [0m lib389:acl_test.py:1138 ######## 'netscapemdsuffix': [b'cn=ldap://dc=localhost,dc=localdomain:39001'] [32mINFO [0m lib389:acl_test.py:1143 Exception (expected): INSUFFICIENT_ACCESS [32mINFO [0m lib389:acl_test.py:1150 The entry was not renamed (expected) [32mINFO [0m lib389:acl_test.py:133 Bind as cn=Directory Manager | |||
Passed | suites/acl/deladd_test.py::test_allow_delete_access_to_groupdn | 0.09 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/acl/deladd_test.py::test_allow_add_access_to_anyone | 0.07 | |
No log output captured. | |||
Passed | suites/acl/deladd_test.py::test_allow_delete_access_to_anyone | 0.07 | |
No log output captured. | |||
Passed | suites/acl/deladd_test.py::test_allow_delete_access_not_to_userdn | 0.09 | |
No log output captured. | |||
Passed | suites/acl/deladd_test.py::test_allow_delete_access_not_to_group | 0.09 | |
No log output captured. | |||
Passed | suites/acl/deladd_test.py::test_allow_add_access_to_parent | 0.07 | |
No log output captured. | |||
Passed | suites/acl/deladd_test.py::test_allow_delete_access_to_parent | 0.08 | |
No log output captured. | |||
Passed | suites/acl/deladd_test.py::test_allow_delete_access_to_dynamic_group | 0.07 | |
No log output captured. | |||
Passed | suites/acl/deladd_test.py::test_allow_delete_access_to_dynamic_group_uid | 0.10 | |
No log output captured. | |||
Passed | suites/acl/deladd_test.py::test_allow_delete_access_not_to_dynamic_group | 0.09 | |
No log output captured. | |||
Passed | suites/acl/enhanced_aci_modrnd_test.py::test_enhanced_aci_modrnd | 0.03 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m tests.suites.acl.enhanced_aci_modrnd_test:enhanced_aci_modrnd_test.py:32 Add a container: ou=test_ou_1,dc=example,dc=com [32mINFO [0m tests.suites.acl.enhanced_aci_modrnd_test:enhanced_aci_modrnd_test.py:38 Add a container: ou=test_ou_2,dc=example,dc=com [32mINFO [0m tests.suites.acl.enhanced_aci_modrnd_test:enhanced_aci_modrnd_test.py:44 Add a user: cn=test_user,ou=test_ou_1,dc=example,dc=com [32mINFO [0m tests.suites.acl.enhanced_aci_modrnd_test:enhanced_aci_modrnd_test.py:58 Add an ACI 'allow (all)' by cn=test_user,ou=test_ou_1,dc=example,dc=com to the ou=test_ou_1,dc=example,dc=com [32mINFO [0m tests.suites.acl.enhanced_aci_modrnd_test:enhanced_aci_modrnd_test.py:62 Add an ACI 'allow (all)' by cn=test_user,ou=test_ou_1,dc=example,dc=com to the ou=test_ou_2,dc=example,dc=com -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.acl.enhanced_aci_modrnd_test:enhanced_aci_modrnd_test.py:93 Bind as cn=test_user,ou=test_ou_1,dc=example,dc=com [32mINFO [0m tests.suites.acl.enhanced_aci_modrnd_test:enhanced_aci_modrnd_test.py:97 User MODRDN operation from ou=test_ou_1,dc=example,dc=com to ou=test_ou_2,dc=example,dc=com [32mINFO [0m tests.suites.acl.enhanced_aci_modrnd_test:enhanced_aci_modrnd_test.py:103 Check there is no user in ou=test_ou_1,dc=example,dc=com [32mINFO [0m tests.suites.acl.enhanced_aci_modrnd_test:enhanced_aci_modrnd_test.py:109 Check there is our user in ou=test_ou_2,dc=example,dc=com | |||
Passed | suites/acl/globalgroup_part2_test.py::test_undefined_in_group_eval_five | 0.11 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/acl/globalgroup_part2_test.py::test_undefined_in_group_eval_six | 0.09 | |
No log output captured. | |||
Passed | suites/acl/globalgroup_part2_test.py::test_undefined_in_group_eval_seven | 0.05 | |
No log output captured. | |||
Passed | suites/acl/globalgroup_part2_test.py::test_undefined_in_group_eval_eight | 0.06 | |
No log output captured. | |||
Passed | suites/acl/globalgroup_part2_test.py::test_undefined_in_group_eval_nine | 0.05 | |
No log output captured. | |||
Passed | suites/acl/globalgroup_part2_test.py::test_undefined_in_group_eval_ten | 0.08 | |
No log output captured. | |||
Passed | suites/acl/globalgroup_part2_test.py::test_undefined_in_group_eval_eleven | 0.05 | |
No log output captured. | |||
Passed | suites/acl/globalgroup_part2_test.py::test_undefined_in_group_eval_twelve | 0.16 | |
No log output captured. | |||
Passed | suites/acl/globalgroup_part2_test.py::test_undefined_in_group_eval_fourteen | 0.11 | |
No log output captured. | |||
Passed | suites/acl/globalgroup_part2_test.py::test_undefined_in_group_eval_fifteen | 0.07 | |
No log output captured. | |||
Passed | suites/acl/globalgroup_part2_test.py::test_undefined_in_group_eval_sixteen | 0.30 | |
No log output captured. | |||
Passed | suites/acl/globalgroup_part2_test.py::test_undefined_in_group_eval_seventeen | 0.04 | |
No log output captured. | |||
Passed | suites/acl/globalgroup_part2_test.py::test_undefined_in_group_eval_eighteen | 0.05 | |
No log output captured. | |||
Passed | suites/acl/globalgroup_test.py::test_caching_changes | 0.09 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/acl/globalgroup_test.py::test_deny_group_member_all_rights_to_user | 0.08 | |
No log output captured. | |||
Passed | suites/acl/globalgroup_test.py::test_deny_group_member_all_rights_to_group_members | 0.30 | |
No log output captured. | |||
Passed | suites/acl/globalgroup_test.py::test_deeply_nested_groups_aci_denial | 0.34 | |
No log output captured. | |||
Passed | suites/acl/globalgroup_test.py::test_deeply_nested_groups_aci_denial_two | 0.03 | |
No log output captured. | |||
Passed | suites/acl/globalgroup_test.py::test_deeply_nested_groups_aci_allow | 0.03 | |
No log output captured. | |||
Passed | suites/acl/globalgroup_test.py::test_deeply_nested_groups_aci_allow_two | 0.04 | |
No log output captured. | |||
Passed | suites/acl/globalgroup_test.py::test_undefined_in_group_eval | 0.05 | |
No log output captured. | |||
Passed | suites/acl/globalgroup_test.py::test_undefined_in_group_eval_two | 0.03 | |
No log output captured. | |||
Passed | suites/acl/globalgroup_test.py::test_undefined_in_group_eval_three | 0.03 | |
No log output captured. | |||
Passed | suites/acl/globalgroup_test.py::test_undefined_in_group_eval_four | 0.08 | |
No log output captured. | |||
Passed | suites/acl/keywords_part2_test.py::test_ip_keyword_test_noip_cannot | 0.13 | |
No log output captured. | |||
Passed | suites/acl/keywords_part2_test.py::test_user_can_access_the_data_at_any_time | 0.10 | |
No log output captured. | |||
Passed | suites/acl/keywords_part2_test.py::test_user_can_access_the_data_only_in_the_morning | 0.13 | |
No log output captured. | |||
Passed | suites/acl/keywords_part2_test.py::test_user_can_access_the_data_only_in_the_afternoon | 0.14 | |
No log output captured. | |||
Passed | suites/acl/keywords_part2_test.py::test_timeofday_keyword | 1.21 | |
No log output captured. | |||
Passed | suites/acl/keywords_part2_test.py::test_dayofweek_keyword_test_everyday_can_access | 0.14 | |
No log output captured. | |||
Passed | suites/acl/keywords_part2_test.py::test_dayofweek_keyword_today_can_access | 0.14 | |
No log output captured. | |||
Passed | suites/acl/keywords_part2_test.py::test_user_cannot_access_the_data_at_all | 0.15 | |
No log output captured. | |||
Passed | suites/acl/keywords_test.py::test_user_binds_with_a_password_and_can_access_the_data | 0.06 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/acl/keywords_test.py::test_user_binds_with_a_bad_password_and_cannot_access_the_data | 0.01 | |
No log output captured. | |||
Passed | suites/acl/keywords_test.py::test_anonymous_user_cannot_access_the_data | 0.05 | |
No log output captured. | |||
Passed | suites/acl/keywords_test.py::test_authenticated_but_has_no_rigth_on_the_data | 0.06 | |
No log output captured. | |||
Passed | suites/acl/keywords_test.py::test_the_bind_client_is_accessing_the_directory | 0.02 | |
No log output captured. | |||
Passed | suites/acl/keywords_test.py::test_users_binds_with_a_password_and_can_access_the_data | 0.02 | |
No log output captured. | |||
Passed | suites/acl/keywords_test.py::test_user_binds_without_any_password_and_cannot_access_the_data | 0.03 | |
No log output captured. | |||
Passed | suites/acl/keywords_test.py::test_user_can_access_the_data_when_connecting_from_any_machine | 0.07 | |
No log output captured. | |||
Passed | suites/acl/keywords_test.py::test_user_can_access_the_data_when_connecting_from_internal_ds_network_only | 0.05 | |
No log output captured. | |||
Passed | suites/acl/keywords_test.py::test_user_can_access_the_data_when_connecting_from_some_network_only | 0.07 | |
No log output captured. | |||
Passed | suites/acl/keywords_test.py::test_from_an_unauthorized_network | 0.08 | |
No log output captured. | |||
Passed | suites/acl/keywords_test.py::test_user_cannot_access_the_data_when_connecting_from_an_unauthorized_network_2 | 0.07 | |
No log output captured. | |||
Passed | suites/acl/keywords_test.py::test_user_cannot_access_the_data_if_not_from_a_certain_domain | 0.07 | |
No log output captured. | |||
Passed | suites/acl/keywords_test.py::test_dnsalias_keyword_test_nodns_cannot | 1.13 | |
No log output captured. | |||
Passed | suites/acl/keywords_test.py::test_user_can_access_from_ipv4_or_ipv6_address[127.0.0.1] | 0.05 | |
No log output captured. | |||
Passed | suites/acl/keywords_test.py::test_user_can_access_from_ipv4_or_ipv6_address[[::1]] | 0.02 | |
No log output captured. | |||
Passed | suites/acl/misc_test.py::test_accept_aci_in_addition_to_acl | 0.37 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/acl/misc_test.py::test_more_then_40_acl_will_crash_slapd | 0.49 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389:misc_test.py:76 Exception (expected): ALREADY_EXISTS | |||
Passed | suites/acl/misc_test.py::test_search_access_should_not_include_read_access | 0.02 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389:misc_test.py:76 Exception (expected): ALREADY_EXISTS | |||
Passed | suites/acl/misc_test.py::test_only_allow_some_targetattr | 0.11 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389:misc_test.py:76 Exception (expected): ALREADY_EXISTS | |||
Passed | suites/acl/misc_test.py::test_only_allow_some_targetattr_two | 0.47 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389:misc_test.py:76 Exception (expected): ALREADY_EXISTS | |||
Passed | suites/acl/misc_test.py::test_memberurl_needs_to_be_normalized | 0.17 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389:misc_test.py:76 Exception (expected): ALREADY_EXISTS | |||
Passed | suites/acl/misc_test.py::test_greater_than_200_acls_can_be_created | 5.78 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389:misc_test.py:76 Exception (expected): ALREADY_EXISTS | |||
Passed | suites/acl/misc_test.py::test_server_bahaves_properly_with_very_long_attribute_names | 0.11 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389:misc_test.py:76 Exception (expected): ALREADY_EXISTS | |||
Passed | suites/acl/misc_test.py::test_do_bind_as_201_distinct_users | 216.46 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389:misc_test.py:76 Exception (expected): ALREADY_EXISTS | |||
Passed | suites/acl/modify_test.py::test_allow_write_access_to_targetattr_with_a_single_attribute | 0.84 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/acl/modify_test.py::test_allow_write_access_to_targetattr_with_multiple_attibutes | 0.08 | |
No log output captured. | |||
Passed | suites/acl/modify_test.py::test_allow_write_access_to_userdn_all | 0.15 | |
No log output captured. | |||
Passed | suites/acl/modify_test.py::test_allow_write_access_to_userdn_with_wildcards_in_dn | 0.07 | |
No log output captured. | |||
Passed | suites/acl/modify_test.py::test_allow_write_access_to_userdn_with_multiple_dns | 0.26 | |
No log output captured. | |||
Passed | suites/acl/modify_test.py::test_allow_write_access_to_target_with_wildcards | 0.28 | |
No log output captured. | |||
Passed | suites/acl/modify_test.py::test_allow_write_access_to_userdnattr | 0.13 | |
No log output captured. | |||
Passed | suites/acl/modify_test.py::test_allow_selfwrite_access_to_anyone | 0.61 | |
No log output captured. | |||
Passed | suites/acl/modify_test.py::test_uniquemember_should_also_be_the_owner | 0.32 | |
No log output captured. | |||
Passed | suites/acl/modify_test.py::test_aci_with_both_allow_and_deny | 0.21 | |
No log output captured. | |||
Passed | suites/acl/modify_test.py::test_allow_owner_to_modify_entry | 0.12 | |
No log output captured. | |||
Passed | suites/acl/modrdn_test.py::test_allow_write_privilege_to_anyone | 0.04 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/acl/modrdn_test.py::test_allow_write_privilege_to_dynamic_group_with_scope_set_to_base_in_ldap_url | 0.04 | |
No log output captured. | |||
Passed | suites/acl/modrdn_test.py::test_write_access_to_naming_atributes | 0.05 | |
No log output captured. | |||
Passed | suites/acl/modrdn_test.py::test_write_access_to_naming_atributes_two | 0.15 | |
No log output captured. | |||
Passed | suites/acl/modrdn_test.py::test_access_aci_list_contains_any_deny_rule | 0.22 | |
No log output captured. | |||
Passed | suites/acl/modrdn_test.py::test_renaming_target_entry | 0.09 | |
No log output captured. | |||
Passed | suites/acl/repeated_ldap_add_test.py::test_repeated_ldap_add | 31.79 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. ------------------------------Captured stdout call------------------------------ Entry uid=buser123,ou=BOU,dc=example,dc=com is locked -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:184 Testing Bug 1347760 - Information disclosure via repeated use of LDAP ADD operation, etc. [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:186 Disabling accesslog logbuffering [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:189 Bind as {cn=Directory Manager,password} [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:192 Adding ou=BOU a bind user belongs to. [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:197 Adding a bind user. [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:204 Adding a test user. [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:211 Deleting aci in dc=example,dc=com. [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:214 While binding as DM, acquire an access log path and instance dir [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:220 Bind case 1. the bind user has no rights to read the entry itself, bind should be successful. [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:221 Bind as {uid=buser123,ou=BOU,dc=example,dc=com,buser123} who has no access rights. 
[32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:229 Access log path: /var/log/dirsrv/slapd-standalone1/access [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:231 Bind case 2-1. the bind user does not exist, bind should fail with error INVALID_CREDENTIALS [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:233 Bind as {uid=bogus,dc=example,dc=com,bogus} who does not exist. [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:237 Exception (expected): INVALID_CREDENTIALS [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:238 Desc Invalid credentials [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:246 Cause found - [09/Nov/2020:19:32:06.634298947 -0500] conn=1 op=11 RESULT err=49 tag=97 nentries=0 wtime=0.000241255 optime=0.003808212 etime=0.004047217 - No such entry [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:249 Bind case 2-2. the bind user's suffix does not exist, bind should fail with error INVALID_CREDENTIALS [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:251 Bind as {uid=bogus,ou=people,dc=bogus,bogus} who does not exist. [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:260 Cause found - [09/Nov/2020:19:32:07.648376997 -0500] conn=1 op=12 RESULT err=49 tag=97 nentries=0 wtime=0.000291015 optime=0.005461270 etime=0.005743005 - No suffix for bind dn found [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:263 Bind case 2-3. the bind user's password is wrong, bind should fail with error INVALID_CREDENTIALS [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:265 Bind as {uid=buser123,ou=BOU,dc=example,dc=com,bogus} who does not exist. 
[32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:269 Exception (expected): INVALID_CREDENTIALS [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:270 Desc Invalid credentials [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:278 Cause found - [09/Nov/2020:19:32:08.690635104 -0500] conn=1 op=13 RESULT err=49 tag=97 nentries=0 wtime=0.000179315 optime=0.037265838 etime=0.037435551 - Invalid credentials [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:281 Adding aci for uid=buser123,ou=BOU,dc=example,dc=com to ou=BOU,dc=example,dc=com. [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:283 aci: (targetattr="*")(version 3.0; acl "buser123"; allow(all) userdn = "ldap:///uid=buser123,ou=BOU,dc=example,dc=com";) [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:284 Bind as {cn=Directory Manager,password} [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:289 Bind case 3. the bind user has the right to read the entry itself, bind should be successful. [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:290 Bind as {uid=buser123,ou=BOU,dc=example,dc=com,buser123} which should be ok. [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:293 The following operations are against the subtree the bind user uid=buser123,ou=BOU,dc=example,dc=com has no rights. [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:297 Search case 1. the bind user has no rights to read the search entry, it should return no search results with <class 'ldap.SUCCESS'> [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Searching existing entry uid=tuser0,ou=people,dc=example,dc=com, which should be ok. 
[32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:141 Search should return none [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:303 Search case 2-1. the search entry does not exist, the search should return no search results with SUCCESS [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Searching non-existing entry uid=bogus,dc=example,dc=com, which should be ok. [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:141 Search should return none [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:309 Search case 2-2. the search entry does not exist, the search should return no search results with SUCCESS [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Searching non-existing entry uid=bogus,ou=people,dc=example,dc=com, which should be ok. [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:141 Search should return none [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:316 Add case 1. the bind user has no rights AND the adding entry exists, it should fail with INSUFFICIENT_ACCESS [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Adding existing entry uid=tuser0,ou=people,dc=example,dc=com, which should fail with INSUFFICIENT_ACCESS. 
[32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): INSUFFICIENT_ACCESS [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc Insufficient access [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:322 Add case 2-1. the bind user has no rights AND the adding entry does not exist, it should fail with INSUFFICIENT_ACCESS [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Adding non-existing entry uid=bogus,dc=example,dc=com, which should fail with INSUFFICIENT_ACCESS. [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): INSUFFICIENT_ACCESS [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc Insufficient access [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:328 Add case 2-2. the bind user has no rights AND the adding entry does not exist, it should fail with INSUFFICIENT_ACCESS [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Adding non-existing entry uid=bogus,ou=people,dc=example,dc=com, which should fail with INSUFFICIENT_ACCESS. [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): INSUFFICIENT_ACCESS [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc Insufficient access [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:335 Modify case 1. 
the bind user has no rights AND the modifying entry exists, it should fail with INSUFFICIENT_ACCESS [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Modifying existing entry uid=tuser0,ou=people,dc=example,dc=com, which should fail with INSUFFICIENT_ACCESS. [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): INSUFFICIENT_ACCESS [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc Insufficient access [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:341 Modify case 2-1. the bind user has no rights AND the modifying entry does not exist, it should fail with INSUFFICIENT_ACCESS [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Modifying non-existing entry uid=bogus,dc=example,dc=com, which should fail with INSUFFICIENT_ACCESS. [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): INSUFFICIENT_ACCESS [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc Insufficient access [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:347 Modify case 2-2. the bind user has no rights AND the modifying entry does not exist, it should fail with INSUFFICIENT_ACCESS [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Modifying non-existing entry uid=bogus,ou=people,dc=example,dc=com, which should fail with INSUFFICIENT_ACCESS. 
[32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): INSUFFICIENT_ACCESS [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc Insufficient access [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:354 Modrdn case 1. the bind user has no rights AND the renaming entry exists, it should fail with INSUFFICIENT_ACCESS [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Renaming existing entry uid=tuser0,ou=people,dc=example,dc=com, which should fail with INSUFFICIENT_ACCESS. [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): INSUFFICIENT_ACCESS [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc Insufficient access [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:360 Modrdn case 2-1. the bind user has no rights AND the renaming entry does not exist, it should fail with INSUFFICIENT_ACCESS [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Renaming non-existing entry uid=bogus,dc=example,dc=com, which should fail with INSUFFICIENT_ACCESS. [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): INSUFFICIENT_ACCESS [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc Insufficient access [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:366 Modrdn case 2-2. 
the bind user has no rights AND the renaming entry does not exist, it should fail with INSUFFICIENT_ACCESS [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Renaming non-existing entry uid=bogus,ou=people,dc=example,dc=com, which should fail with INSUFFICIENT_ACCESS. [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): INSUFFICIENT_ACCESS [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc Insufficient access [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:372 Modrdn case 3. the bind user has no rights AND the node moving an entry to exists, it should fail with INSUFFICIENT_ACCESS [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Moving to existing superior ou=groups,dc=example,dc=com, which should fail with INSUFFICIENT_ACCESS. [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): INSUFFICIENT_ACCESS [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc Insufficient access [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:378 Modrdn case 4-1. the bind user has no rights AND the node moving an entry to does not, it should fail with INSUFFICIENT_ACCESS [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Moving to non-existing superior ou=OU,dc=example,dc=com, which should fail with INSUFFICIENT_ACCESS. 
[32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): INSUFFICIENT_ACCESS [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc Insufficient access [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:384 Modrdn case 4-2. the bind user has no rights AND the node moving an entry to does not, it should fail with INSUFFICIENT_ACCESS [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Moving to non-existing superior ou=OU,dc=example,dc=com, which should fail with INSUFFICIENT_ACCESS. [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): INSUFFICIENT_ACCESS [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc Insufficient access [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:391 Delete case 1. the bind user has no rights AND the deleting entry exists, it should fail with INSUFFICIENT_ACCESS [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Deleting existing entry uid=tuser0,ou=people,dc=example,dc=com, which should fail with INSUFFICIENT_ACCESS. [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): INSUFFICIENT_ACCESS [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc Insufficient access [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:397 Delete case 2-1. 
the bind user has no rights AND the deleting entry does not exist, it should fail with INSUFFICIENT_ACCESS [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Deleting non-existing entry uid=bogus,dc=example,dc=com, which should fail with INSUFFICIENT_ACCESS. [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): INSUFFICIENT_ACCESS [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc Insufficient access [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:403 Delete case 2-2. the bind user has no rights AND the deleting entry does not exist, it should fail with INSUFFICIENT_ACCESS [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Deleting non-existing entry uid=bogus,ou=people,dc=example,dc=com, which should fail with INSUFFICIENT_ACCESS. [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): INSUFFICIENT_ACCESS [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc Insufficient access [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:407 EXTRA: Check no regressions [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:408 Adding aci for uid=buser123,ou=BOU,dc=example,dc=com to dc=example,dc=com. [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:410 Bind as {cn=Directory Manager,password} [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:415 Bind as {uid=buser123,ou=BOU,dc=example,dc=com,buser123}. [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:425 Search case. 
the search entry does not exist, the search should fail with NO_SUCH_OBJECT [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Searching non-existing entry uid=bogus,ou=people,dc=example,dc=com, which should fail with NO_SUCH_OBJECT. [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): NO_SUCH_OBJECT [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc No such object [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:431 Add case. the adding entry already exists, it should fail with ALREADY_EXISTS [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Adding existing entry uid=tuser0,ou=people,dc=example,dc=com, which should fail with ALREADY_EXISTS. [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): ALREADY_EXISTS [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc Already exists [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:436 Modify case. the modifying entry does not exist, it should fail with NO_SUCH_OBJECT [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Modifying non-existing entry uid=bogus,dc=example,dc=com, which should fail with NO_SUCH_OBJECT. [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): NO_SUCH_OBJECT [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc No such object [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:441 Modrdn case 1. 
the renaming entry does not exist, it should fail with NO_SUCH_OBJECT [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Renaming non-existing entry uid=bogus,dc=example,dc=com, which should fail with NO_SUCH_OBJECT. [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): NO_SUCH_OBJECT [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc No such object [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:446 Modrdn case 2. the node moving an entry to does not, it should fail with NO_SUCH_OBJECT [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Moving to non-existing superior ou=OU,dc=example,dc=com, which should fail with NO_SUCH_OBJECT. [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): NO_SUCH_OBJECT [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc No such object [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:451 Delete case. the deleting entry does not exist, it should fail with NO_SUCH_OBJECT [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Deleting non-existing entry uid=bogus,dc=example,dc=com, which should fail with NO_SUCH_OBJECT. 
[32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): NO_SUCH_OBJECT [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc No such object [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:454 Inactivate uid=buser123,ou=BOU,dc=example,dc=com [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:461 ['/usr/sbin/dsidm', 'standalone1', '-b', 'dc=example,dc=com', 'account', 'lock', 'uid=buser123,ou=BOU,dc=example,dc=com'] [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:465 Bind as {uid=buser123,ou=BOU,dc=example,dc=com,buser123} which should fail with UNWILLING_TO_PERFORM. [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:469 Exception (expected): UNWILLING_TO_PERFORM [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:470 Desc Server is unwilling to perform [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:473 Bind as {uid=buser123,ou=BOU,dc=example,dc=com,bogus} which should fail with UNWILLING_TO_PERFORM. [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:477 Exception (expected): UNWILLING_TO_PERFORM [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:478 Desc Server is unwilling to perform [32mINFO [0m tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:481 SUCCESS | |||
Passed | suites/acl/roledn_test.py::test_mod_seealso_positive[(STEVE_ROLE, NESTED_ROLE_TESTER)] | 0.09 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/acl/roledn_test.py::test_mod_seealso_positive[(HARRY_ROLE, NESTED_ROLE_TESTER)] | 0.07 | |
No log output captured. | |||
Passed | suites/acl/roledn_test.py::test_mod_seealso_positive[(MARY_ROLE, NOT_RULE_ACCESS)] | 0.10 | |
No log output captured. | |||
Passed | suites/acl/roledn_test.py::test_mod_seealso_positive[(STEVE_ROLE, OR_RULE_ACCESS)] | 0.06 | |
No log output captured. | |||
Passed | suites/acl/roledn_test.py::test_mod_seealso_positive[(HARRY_ROLE, OR_RULE_ACCESS)] | 0.05 | |
No log output captured. | |||
Passed | suites/acl/roledn_test.py::test_mod_seealso_positive[(STEVE_ROLE, ALL_ACCESS)] | 0.07 | |
No log output captured. | |||
Passed | suites/acl/roledn_test.py::test_mod_seealso_positive[(HARRY_ROLE, ALL_ACCESS)] | 0.05 | |
No log output captured. | |||
Passed | suites/acl/roledn_test.py::test_mod_seealso_positive[(MARY_ROLE, ALL_ACCESS)] | 0.09 | |
No log output captured. | |||
Passed | suites/acl/roledn_test.py::test_mod_seealso_negative[(MARY_ROLE, NESTED_ROLE_TESTER)] | 0.05 | |
No log output captured. | |||
Passed | suites/acl/roledn_test.py::test_mod_seealso_negative[(STEVE_ROLE, NOT_RULE_ACCESS)] | 0.11 | |
No log output captured. | |||
Passed | suites/acl/roledn_test.py::test_mod_seealso_negative[(HARRY_ROLE, NOT_RULE_ACCESS)] | 0.09 | |
No log output captured. | |||
Passed | suites/acl/roledn_test.py::test_mod_seealso_negative[(MARY_ROLE , OR_RULE_ACCESS)] | 0.07 | |
No log output captured. | |||
Passed | suites/acl/roledn_test.py::test_mod_anonseealso_positive[NOT_RULE_ACCESS] | 0.02 | |
No log output captured. | |||
Passed | suites/acl/roledn_test.py::test_mod_anonseealso_positive[ALL_ACCESS] | 0.03 | |
No log output captured. | |||
Passed | suites/acl/roledn_test.py::test_mod_anonseealso_negaive[NESTED_ROLE_TESTER] | 0.04 | |
No log output captured. | |||
Passed | suites/acl/roledn_test.py::test_mod_anonseealso_negaive[OR_RULE_ACCESS] | 0.03 | |
No log output captured. | |||
Passed | suites/acl/search_real_part2_test.py::test_deny_all_access_with__target_set_on_non_leaf | 0.74 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/acl/search_real_part2_test.py::test_deny_all_access_with__target_set_on_wildcard_non_leaf | 0.82 | |
No log output captured. | |||
Passed | suites/acl/search_real_part2_test.py::test_deny_all_access_with__target_set_on_wildcard_leaf | 0.81 | |
No log output captured. | |||
Passed | suites/acl/search_real_part2_test.py::test_deny_all_access_with_targetfilter_using_equality_search | 0.38 | |
No log output captured. | |||
Passed | suites/acl/search_real_part2_test.py::test_deny_all_access_with_targetfilter_using_equality_search_two | 0.75 | |
No log output captured. | |||
Passed | suites/acl/search_real_part2_test.py::test_deny_all_access_with_targetfilter_using_substring_search | 0.29 | |
No log output captured. | |||
Passed | suites/acl/search_real_part2_test.py::test_deny_all_access_with_targetfilter_using_substring_search_two | 2.04 | |
No log output captured. | |||
Passed | suites/acl/search_real_part2_test.py::test_deny_all_access_with_targetfilter_using_boolean_or_of_two_equality_search | 0.26 | |
No log output captured. | |||
Passed | suites/acl/search_real_part2_test.py::test_deny_all_access_to__userdn_two | 0.68 | |
No log output captured. | |||
Passed | suites/acl/search_real_part2_test.py::test_deny_all_access_with_userdn | 0.72 | |
No log output captured. | |||
Passed | suites/acl/search_real_part2_test.py::test_deny_all_access_with_targetfilter_using_presence_search | 0.22 | |
No log output captured. | |||
Passed | suites/acl/search_real_part3_test.py::test_deny_search_access_to_userdn_with_ldap_url | 0.91 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/acl/search_real_part3_test.py::test_deny_search_access_to_userdn_with_ldap_url_two | 0.80 | |
No log output captured. | |||
Passed | suites/acl/search_real_part3_test.py::test_deny_search_access_to_userdn_with_ldap_url_matching_all_users | 0.83 | |
No log output captured. | |||
Passed | suites/acl/search_real_part3_test.py::test_deny_read_access_to_a_dynamic_group | 0.55 | |
No log output captured. | |||
Passed | suites/acl/search_real_part3_test.py::test_deny_read_access_to_dynamic_group_with_host_port_set_on_ldap_url | 0.46 | |
No log output captured. | |||
Passed | suites/acl/search_real_part3_test.py::test_deny_read_access_to_dynamic_group_with_scope_set_to_one_in_ldap_url | 0.55 | |
No log output captured. | |||
Passed | suites/acl/search_real_part3_test.py::test_deny_read_access_to_dynamic_group_two | 0.95 | |
No log output captured. | |||
Passed | suites/acl/search_real_part3_test.py::test_deny_access_to_group_should_deny_access_to_all_uniquemember | 1.01 | |
No log output captured. | |||
Passed | suites/acl/search_real_part3_test.py::test_entry_with_lots_100_attributes | 13.24 | |
No log output captured. | |||
Passed | suites/acl/search_real_part3_test.py::test_groupdnattr_value_is_another_group | 0.48 | |
No log output captured. | |||
Passed | suites/acl/search_real_test.py::test_deny_all_access_with_target_set | 0.36 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/acl/search_real_test.py::test_deny_all_access_to_a_target_with_wild_card | 0.39 | |
No log output captured. | |||
Passed | suites/acl/search_real_test.py::test_deny_all_access_without_a_target_set | 2.33 | |
No log output captured. | |||
Passed | suites/acl/search_real_test.py::test_deny_read_search_and_compare_access_with_target_and_targetattr_set | 1.77 | |
No log output captured. | |||
Passed | suites/acl/search_real_test.py::test_deny_read_access_to_multiple_groupdns | 1.29 | |
No log output captured. | |||
Passed | suites/acl/search_real_test.py::test_deny_all_access_to_userdnattr | 0.31 | |
No log output captured. | |||
Passed | suites/acl/search_real_test.py::test_deny_all_access_with__target_set | 0.77 | |
No log output captured. | |||
Passed | suites/acl/search_real_test.py::test_deny_all_access_with__targetattr_set | 2.32 | |
No log output captured. | |||
Passed | suites/acl/search_real_test.py::test_deny_all_access_with_targetattr_set | 1.99 | |
No log output captured. | |||
Passed | suites/acl/selfdn_permissions_test.py::test_selfdn_permission_add | 0.76 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389:selfdn_permissions_test.py:58 Add OCticket47653 that allows 'member' attribute [32mINFO [0m lib389:selfdn_permissions_test.py:63 Add cn=bind_entry, dc=example,dc=com -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:selfdn_permissions_test.py:106 ######################### ADD ###################### [32mINFO [0m lib389:selfdn_permissions_test.py:109 Bind as cn=bind_entry, dc=example,dc=com [32mINFO [0m lib389:selfdn_permissions_test.py:139 Try to add Add cn=test_entry, dc=example,dc=com (aci is missing): dn: cn=test_entry, dc=example,dc=com cn: test_entry member: cn=bind_entry, dc=example,dc=com objectclass: top objectclass: person objectclass: OCticket47653 postalAddress: here postalCode: 1234 sn: test_entry [32mINFO [0m lib389:selfdn_permissions_test.py:143 Exception (expected): INSUFFICIENT_ACCESS [32mINFO [0m lib389:selfdn_permissions_test.py:147 Bind as cn=Directory Manager and add the ADD SELFDN aci [32mINFO [0m lib389:selfdn_permissions_test.py:159 Bind as cn=bind_entry, dc=example,dc=com [32mINFO [0m lib389:selfdn_permissions_test.py:164 Try to add Add cn=test_entry, dc=example,dc=com (member is missing) [32mINFO [0m lib389:selfdn_permissions_test.py:172 Exception (expected): INSUFFICIENT_ACCESS [32mINFO [0m lib389:selfdn_permissions_test.py:178 Try to add Add cn=test_entry, dc=example,dc=com (with several member values) [32mINFO [0m lib389:selfdn_permissions_test.py:181 Exception (expected): INSUFFICIENT_ACCESS [32mINFO [0m lib389:selfdn_permissions_test.py:184 Try to add Add 
cn=test_entry, dc=example,dc=com should be successful | |||
Passed | suites/acl/selfdn_permissions_test.py::test_selfdn_permission_search | 0.50 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:selfdn_permissions_test.py:205 ######################### SEARCH ###################### [32mINFO [0m lib389:selfdn_permissions_test.py:207 Bind as cn=bind_entry, dc=example,dc=com [32mINFO [0m lib389:selfdn_permissions_test.py:211 Try to search cn=test_entry, dc=example,dc=com (aci is missing) [32mINFO [0m lib389:selfdn_permissions_test.py:216 Bind as cn=Directory Manager and add the READ/SEARCH SELFDN aci [32mINFO [0m lib389:selfdn_permissions_test.py:229 Bind as cn=bind_entry, dc=example,dc=com [32mINFO [0m lib389:selfdn_permissions_test.py:233 Try to search cn=test_entry, dc=example,dc=com should be successful | |||
Passed | suites/acl/selfdn_permissions_test.py::test_selfdn_permission_modify | 0.93 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:selfdn_permissions_test.py:256 Bind as cn=bind_entry, dc=example,dc=com [32mINFO [0m lib389:selfdn_permissions_test.py:259 ######################### MODIFY ###################### [32mINFO [0m lib389:selfdn_permissions_test.py:263 Try to modify cn=test_entry, dc=example,dc=com (aci is missing) [32mINFO [0m lib389:selfdn_permissions_test.py:267 Exception (expected): INSUFFICIENT_ACCESS [32mINFO [0m lib389:selfdn_permissions_test.py:271 Bind as cn=Directory Manager and add the WRITE SELFDN aci [32mINFO [0m lib389:selfdn_permissions_test.py:284 Bind as cn=bind_entry, dc=example,dc=com [32mINFO [0m lib389:selfdn_permissions_test.py:288 Try to modify cn=test_entry, dc=example,dc=com. It should succeeds | |||
Passed | suites/acl/selfdn_permissions_test.py::test_selfdn_permission_delete | 0.41 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:selfdn_permissions_test.py:314 ######################### DELETE ###################### [32mINFO [0m lib389:selfdn_permissions_test.py:317 Bind as cn=bind_entry, dc=example,dc=com [32mINFO [0m lib389:selfdn_permissions_test.py:322 Try to delete cn=test_entry, dc=example,dc=com (aci is missing) [32mINFO [0m lib389:selfdn_permissions_test.py:325 Exception (expected): INSUFFICIENT_ACCESS [32mINFO [0m lib389:selfdn_permissions_test.py:329 Bind as cn=Directory Manager and add the READ/SEARCH SELFDN aci [32mINFO [0m lib389:selfdn_permissions_test.py:341 Bind as cn=bind_entry, dc=example,dc=com [32mINFO [0m lib389:selfdn_permissions_test.py:345 Try to delete cn=test_entry, dc=example,dc=com should be successful | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_1] | 0.05 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_2] | 0.03 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_3] | 0.03 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_4] | 0.12 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_5] | 0.03 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_6] | 0.03 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_7] | 0.03 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_8] | 0.13 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_9] | 0.03 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_10] | 0.03 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_11] | 0.03 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_12] | 0.06 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_13] | 0.03 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_14] | 0.03 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_15] | 0.03 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_16] | 0.04 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_17] | 0.03 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_19] | 0.03 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_21] | 0.03 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_22] | 0.03 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_23] | 0.03 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_Missing_acl_mispel] | 0.03 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_Missing_acl_string] | 0.03 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_Wrong_version_string] | 0.03 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_Missing_version_string] | 0.03 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_Authenticate_statement] | 0.03 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_Multiple_targets] | 0.03 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_Target_set_to_self] | 0.03 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_target_set_with_ldap_instead_of_ldap] | 0.04 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_target_set_with_more_than_three] | 0.03 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_target_set_with_less_than_three] | 0.03 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_bind_rule_set_with_less_than_three] | 0.03 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_Use_semicolon_instead_of_comma_in_permission] | 0.03 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_Use_double_equal_instead_of_equal_in_the_target] | 0.05 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_use_double_equal_instead_of_equal_in_user_and_group_access] | 0.03 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_donot_cote_the_name_of_the_aci] | 0.04 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_extra_parentheses_case_1] | 0.03 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_extra_parentheses_case_2] | 0.03 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_extra_parentheses_case_3] | 0.03 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_no_semicolon_at_the_end_of_the_aci] | 0.03 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_a_character_different_of_a_semicolon_at_the_end_of_the_aci] | 0.03 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_bad_filter] | 0.03 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_Use_double_equal_instead_of_equal_in_the_targattrfilters] | 0.03 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_Use_double_equal_instead_of_equal_inside_the_targattrfilters] | 0.04 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_target_set_above_the_entry_test | 0.04 | |
No log output captured. | |||
Passed | suites/acl/userattr_test.py::test_mod_see_also_positive[(CAN,ROLEDNACCESS)] | 0.06 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/acl/userattr_test.py::test_mod_see_also_positive[(CAN,USERDNACCESS)] | 0.03 | |
No log output captured. | |||
Passed | suites/acl/userattr_test.py::test_mod_see_also_positive[(CAN,GROUPDNACCESS)] | 0.03 | |
No log output captured. | |||
Passed | suites/acl/userattr_test.py::test_mod_see_also_positive[(CAN,LDAPURLACCESS)] | 0.02 | |
No log output captured. | |||
Passed | suites/acl/userattr_test.py::test_mod_see_also_positive[(CAN,ATTRNAMEACCESS)] | 0.04 | |
No log output captured. | |||
Passed | suites/acl/userattr_test.py::test_mod_see_also_positive[(LEVEL_0, OU_2)] | 0.06 | |
No log output captured. | |||
Passed | suites/acl/userattr_test.py::test_mod_see_also_positive[(LEVEL_1,ANCESTORS)] | 0.05 | |
No log output captured. | |||
Passed | suites/acl/userattr_test.py::test_mod_see_also_positive[(LEVEL_2,GRANDPARENTS)] | 0.06 | |
No log output captured. | |||
Passed | suites/acl/userattr_test.py::test_mod_see_also_positive[(LEVEL_4,OU_2)] | 0.06 | |
No log output captured. | |||
Passed | suites/acl/userattr_test.py::test_mod_see_also_positive[(LEVEL_4, ANCESTORS)] | 0.02 | |
No log output captured. | |||
Passed | suites/acl/userattr_test.py::test_mod_see_also_positive[(LEVEL_4,GRANDPARENTS)] | 0.02 | |
No log output captured. | |||
Passed | suites/acl/userattr_test.py::test_mod_see_also_positive[(LEVEL_4,PARENTS)] | 0.03 | |
No log output captured. | |||
Passed | suites/acl/userattr_test.py::test_mod_see_also_positive[(LEVEL_4,CHILDREN)] | 0.02 | |
No log output captured. | |||
Passed | suites/acl/userattr_test.py::test_mod_see_also_negative[(CANNOT,ROLEDNACCESS)] | 0.07 | |
No log output captured. | |||
Passed | suites/acl/userattr_test.py::test_mod_see_also_negative[(CANNOT,USERDNACCESS)] | 0.04 | |
No log output captured. | |||
Passed | suites/acl/userattr_test.py::test_mod_see_also_negative[(CANNOT,GROUPDNACCESS)] | 0.04 | |
No log output captured. | |||
Passed | suites/acl/userattr_test.py::test_mod_see_also_negative[(CANNOT,LDAPURLACCESS)] | 0.04 | |
No log output captured. | |||
Passed | suites/acl/userattr_test.py::test_mod_see_also_negative[(CANNOT,ATTRNAMEACCESS)] | 0.04 | |
No log output captured. | |||
Passed | suites/acl/userattr_test.py::test_mod_see_also_negative[(LEVEL_0, ANCESTORS)] | 0.08 | |
No log output captured. | |||
Passed | suites/acl/userattr_test.py::test_mod_see_also_negative[(LEVEL_0,GRANDPARENTS)] | 0.04 | |
No log output captured. | |||
Passed | suites/acl/userattr_test.py::test_mod_see_also_negative[(LEVEL_0,PARENTS)] | 0.04 | |
No log output captured. | |||
Passed | suites/acl/userattr_test.py::test_mod_see_also_negative[(LEVEL_0,CHILDREN)] | 0.04 | |
No log output captured. | |||
Passed | suites/acl/userattr_test.py::test_mod_see_also_negative[(LEVEL_2,PARENTS)] | 0.09 | |
No log output captured. | |||
Passed | suites/acl/userattr_test.py::test_mod_see_also_negative[(LEVEL_4,GRANDSONS)] | 0.10 | |
No log output captured. | |||
Passed | suites/acl/userattr_test.py::test_last_three[uid=Ananda Borah,ou=Accounting,dc=example,dc=com-uid=USERDNACCESS,ou=Accounting,dc=example,dc=com] | 0.07 | |
No log output captured. | |||
Passed | suites/acl/userattr_test.py::test_last_three[uid=Ananda Borah,ou=Accounting,dc=example,dc=com-uid=ROLEDNACCESS,ou=Accounting,dc=example,dc=com] | 0.04 | |
No log output captured. | |||
Passed | suites/acl/userattr_test.py::test_last_three[uid=Ananda Borah,ou=Accounting,dc=example,dc=com-uid=GROUPDNACCESS,ou=Accounting,dc=example,dc=com] | 0.04 | |
No log output captured. | |||
Passed | suites/acl/valueacl_part2_test.py::test_we_can_search_as_expected | 0.02 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/acl/valueacl_part2_test.py::test_we_can_mod_title_as_expected | 0.08 | |
No log output captured. | |||
Passed | suites/acl/valueacl_part2_test.py::test_modify_with_multiple_filters | 0.06 | |
No log output captured. | |||
Passed | suites/acl/valueacl_part2_test.py::test_denied_by_multiple_filters | 0.08 | |
No log output captured. | |||
Passed | suites/acl/valueacl_part2_test.py::test_allowed_add_one_attribute | 0.06 | |
No log output captured. | |||
Passed | suites/acl/valueacl_part2_test.py::test_cannot_add_an_entry_with_attribute_values_we_are_not_allowed_add | 0.11 | |
No log output captured. | |||
Passed | suites/acl/valueacl_part2_test.py::test_on_modrdn | 0.05 | |
No log output captured. | |||
Passed | suites/acl/valueacl_part2_test.py::test_on_modrdn_allow | 0.08 | |
No log output captured. | |||
Passed | suites/acl/valueacl_part2_test.py::test_targattrfilters_keyword | 0.14 | |
No log output captured. | |||
Passed | suites/acl/valueacl_test.py::test_delete_an_attribute_value_we_are_not_allowed_to_delete | 0.10 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/acl/valueacl_test.py::test_donot_allow_write_access_to_title_if_value_is_not_architect | 0.09 | |
No log output captured. | |||
Passed | suites/acl/valueacl_test.py::test_delete_an_attribute_value_we_are_allowed_to_delete | 0.08 | |
No log output captured. | |||
Passed | suites/acl/valueacl_test.py::test_delete_an_attribute_value_we_are_not_allowed_to_deleted | 0.08 | |
No log output captured. | |||
Passed | suites/acl/valueacl_test.py::test_allow_modify_replace | 0.11 | |
No log output captured. | |||
Passed | suites/acl/valueacl_test.py::test_allow_modify_delete | 0.11 | |
No log output captured. | |||
Passed | suites/acl/valueacl_test.py::test_replace_an_attribute_if_we_lack | 0.11 | |
No log output captured. | |||
Passed | suites/acl/valueacl_test.py::test_remove_an_attribute_if_we_have_del_rights_to_all_attr_value | 0.07 | |
No log output captured. | |||
Passed | suites/acl/valueacl_test.py::test_remove_an_attribute_if_we_donot_have_del_rights_to_all_attr_value | 0.09 | |
No log output captured. | |||
Passed | suites/acl/valueacl_test.py::test_remove_an_attribute_if_we_have_del_rights_to_all_attr_values | 0.08 | |
No log output captured. | |||
Passed | suites/acl/valueacl_test.py::test_cantnot_delete_an_entry_with_attribute_values_we_are_not_allowed_delete | 0.12 | |
No log output captured. | |||
Passed | suites/acl/valueacl_test.py::test_we_can_add_and_delete_an_entry_with_attribute_values_we_are_allowed_add_and_delete | 0.09 | |
No log output captured. | |||
Passed | suites/acl/valueacl_test.py::test_allow_title | 0.08 | |
No log output captured. | |||
Passed | suites/acl/valueacl_test.py::test_allow_to_modify | 0.10 | |
No log output captured. | |||
Passed | suites/acl/valueacl_test.py::test_selfwrite_does_not_confer_write_on_a_targattrfilters_atribute | 0.08 | |
No log output captured. | |||
Passed | suites/acl/valueacl_test.py::test_selfwrite_continues_to_give_rights_to_attr_in_targetattr_list | 0.08 | |
No log output captured. | |||
Passed | suites/acl/valueacl_test.py::test_add_an_attribute_value_we_are_allowed_to_add_with_ldapanyone | 0.07 | |
No log output captured. | |||
Passed | suites/acl/valueacl_test.py::test_hierarchy | 0.10 | |
No log output captured. | |||
Passed | suites/acl/valueacl_test.py::test_targattrfilters_and_search_permissions_and_that_ldapmodify_works_as_expected | 0.07 | |
No log output captured. | |||
Passed | suites/acl/valueacl_test.py::test_targattrfilters_and_search_permissions_and_that_ldapmodify_works_as_expected_two | 0.02 | |
No log output captured. | |||
Passed | suites/attr_encryption/attr_encryption_test.py::test_basic | 6.49 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:35 Enable TLS for attribute encryption [32mINFO [0m tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:38 Enables attribute encryption [32mINFO [0m tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:42 Enables attribute encryption for employeeNumber and telephoneNumber [32mINFO [0m tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:46 Add a test user with encrypted attributes -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:81 Restart the server [32mINFO [0m tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:87 Extracting values of cn from the list of objects in encrypt_attrs [32mINFO [0m tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:88 And appending the cn values in a list [32mINFO [0m tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:93 Check employeenumber encryption is enabled [32mINFO [0m tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:96 Check telephoneNumber encryption is enabled [32mINFO [0m tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:99 Check that encrypted attribute is present for user i.e. telephoneNumber | |||
Passed | suites/attr_encryption/attr_encryption_test.py::test_export_import_ciphertext | 14.72 | |
------------------------------Captured stderr call------------------------------ ldiffile: /var/lib/dirsrv/slapd-standalone1/ldif/export_ciphertext.ldif -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:127 Export data as ciphertext [32mINFO [0m tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:138 Check that the attribute is present in the exported file [32mINFO [0m tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:139 Check that the encrypted value of attribute is not present in the exported file [32mINFO [0m tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:145 Delete the test user entry with encrypted data [32mINFO [0m tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:148 Import data as ciphertext, which was exported previously [32mINFO [0m tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:159 Check that the data with encrypted attribute is imported properly | |||
Passed | suites/attr_encryption/attr_encryption_test.py::test_export_import_plaintext | 17.64 | |
------------------------------Captured stderr call------------------------------ [09/Nov/2020:19:36:59.126769555 -0500] - WARN - Security Initialization - /tmp is not a private namespace. pem files not exported there [09/Nov/2020:19:36:59.143881537 -0500] - INFO - slapd_extract_cert - CA CERT NAME: Self-Signed-CA [09/Nov/2020:19:36:59.153299059 -0500] - ERR - slapd_extract_cert - Unable to open "/tmp/slapd-standalone1/Self-Signed-CA.pem" for writing (-5950, 2). [09/Nov/2020:19:36:59.157700373 -0500] - WARN - Security Initialization - SSL alert: Sending pin request to SVRCore. You may need to run systemd-tty-ask-password-agent to provide the password. [09/Nov/2020:19:36:59.432722417 -0500] - INFO - slapd_extract_cert - SERVER CERT NAME: Server-Cert [09/Nov/2020:19:36:59.444930737 -0500] - WARN - Security Initialization - /tmp is not a private namespace. pem files not exported there [09/Nov/2020:19:36:59.454741615 -0500] - WARN - Security Initialization - /tmp is not a private namespace. pem files not exported there ldiffile: /var/lib/dirsrv/slapd-standalone1/ldif/export_plaintext.ldif [09/Nov/2020:19:37:07.162638980 -0500] - WARN - Security Initialization - /tmp is not a private namespace. pem files not exported there [09/Nov/2020:19:37:07.166520938 -0500] - INFO - slapd_extract_cert - CA CERT NAME: Self-Signed-CA [09/Nov/2020:19:37:07.173454078 -0500] - ERR - slapd_extract_cert - Unable to open "/tmp/slapd-standalone1/Self-Signed-CA.pem" for writing (-5950, 2). [09/Nov/2020:19:37:07.177850940 -0500] - WARN - Security Initialization - SSL alert: Sending pin request to SVRCore. You may need to run systemd-tty-ask-password-agent to provide the password. [09/Nov/2020:19:37:07.420132064 -0500] - INFO - slapd_extract_cert - SERVER CERT NAME: Server-Cert [09/Nov/2020:19:37:07.424274542 -0500] - WARN - Security Initialization - /tmp is not a private namespace. 
pem files not exported there [09/Nov/2020:19:37:07.428836057 -0500] - WARN - Security Initialization - /tmp is not a private namespace. pem files not exported there -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:189 Export data as plain text [32mINFO [0m tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:200 Check that the attribute is present in the exported file [32mINFO [0m tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:201 Check that the plain text value of the encrypted attribute is present in the exported file [32mINFO [0m tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:205 Delete the test user entry with encrypted data [32mINFO [0m tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:208 Import data as plain text, which was exported previously [32mINFO [0m tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:219 Check that the attribute is imported properly | |||
Passed | suites/attr_encryption/attr_encryption_test.py::test_attr_encryption_unindexed | 6.92 | |
------------------------------Captured stderr call------------------------------ ldiffile: /var/lib/dirsrv/slapd-standalone1/ldif/emp_num_ciphertext.ldif -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:242 Export data as cipher text [32mINFO [0m tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:253 Check that the attribute is present in the exported file [32mINFO [0m tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:254 Check that the encrypted value of attribute is not present in the exported file | |||
Passed | suites/attr_encryption/attr_encryption_test.py::test_attr_encryption_multiple_backends | 9.65 | |
------------------------------Captured stderr call------------------------------ ldiffile: /var/lib/dirsrv/slapd-standalone1/ldif/export_db1.ldif ldiffile: /var/lib/dirsrv/slapd-standalone1/ldif/export_db2.ldif -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:287 Add two test backends [32mINFO [0m tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:307 Enables attribute encryption for telephoneNumber in test_backend1 [32mINFO [0m tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:312 Enables attribute encryption for employeeNumber in test_backend2 [32mINFO [0m tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:317 Add a test user with encrypted attributes in both backends [32mINFO [0m tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:326 Export data as ciphertext from both backends [32mINFO [0m tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:343 Check that the attribute is present in the exported file in db1 [32mINFO [0m tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:344 Check that the encrypted value of attribute is not present in the exported file in db1 [32mINFO [0m tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:350 Check that the attribute is present in the exported file in db2 [32mINFO [0m tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:351 Check that the encrypted value of attribute is not present in the exported file in db2 [32mINFO [0m tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:357 Delete test backends | |||
Passed | suites/attr_encryption/attr_encryption_test.py::test_attr_encryption_backends | 11.21 | |
------------------------------Captured stderr call------------------------------ ldiffile: /var/lib/dirsrv/slapd-standalone1/ldif/export_db1.ldif ldiffile: /var/lib/dirsrv/slapd-standalone1/ldif/export_db2.ldif -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:386 Add two test backends [32mINFO [0m tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:405 Enables attribute encryption for telephoneNumber in test_backend1 [32mINFO [0m tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:410 Add a test user with telephoneNumber in both backends [32mINFO [0m tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:419 Export data as ciphertext from both backends [32mINFO [0m tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:436 Check that the attribute is present in the exported file in db1 [32mINFO [0m tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:437 Check that the encrypted value of attribute is not present in the exported file in db1 [32mINFO [0m tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:443 Check that the attribute is present in the exported file in db2 [32mINFO [0m tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:444 Check that the value of attribute is also present in the exported file in db2 [32mINFO [0m tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:450 Delete test backends | |||
Passed | suites/automember_plugin/automember_mod_test.py::test_mods | 12.60 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.automember_plugin.automember_mod_test:automember_mod_test.py:135 Test PASSED | |||
Passed | suites/automember_plugin/automember_test.py::test_automemberscope | 0.01 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/automember_plugin/automember_test.py::test_automemberfilter | 0.26 | |
No log output captured. | |||
Passed | suites/automember_plugin/automember_test.py::test_adduser | 0.08 | |
No log output captured. | |||
Passed | suites/automember_plugin/automember_test.py::test_delete_default_group | 4.70 | |
No log output captured. | |||
Passed | suites/automember_plugin/automember_test.py::test_no_default_group | 4.49 | |
No log output captured. | |||
Passed | suites/automember_plugin/automember_test.py::test_delete_target_group | 4.96 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_disable_the_plug_in | 0.27 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. | |||
Passed | suites/automember_plugin/basic_test.py::test_custom_config_area | 0.04 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_ability_to_control_behavior_of_modifiers_name | 9.64 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_posixaccount_objectclass_automemberdefaultgroup | 0.30 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_duplicated_member_attributes_added_when_the_entry_is_re_created | 0.13 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_multi_valued_automemberdefaultgroup_for_hostgroups | 0.07 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_plugin_creates_member_attributes_of_the_automemberdefaultgroup | 0.10 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_multi_valued_automemberdefaultgroup_with_uniquemember | 9.63 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_invalid_automembergroupingattr_member | 0.56 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_valid_and_invalid_automembergroupingattr | 0.18 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_add_regular_expressions_for_user_groups_and_check_for_member_attribute_after_adding_users | 0.18 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_matching_gid_role_inclusive_regular_expression[autoMembers_22-5288-5289-Contractor-5291-5292-Contractors] | 0.07 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_matching_gid_role_inclusive_regular_expression[autoMembers_21-1161-1162-Contractor-1162-1163-Contractors] | 0.08 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_matching_gid_role_inclusive_regular_expression[autoMembers_20-1188-1189-CEO-1191-1192-Contractors] | 0.10 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_matching_gid_role_inclusive_regular_expression[autoMembers_15-9288-9289-Manager-9291-9292-Managers] | 0.08 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_matching_gid_role_inclusive_regular_expression[autoMembers_14-561-562-Manager-562-563-Managers] | 0.09 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_matching_gid_role_inclusive_regular_expression[autoMembers_13-9788-9789-VPEngg-9392-9393-Managers] | 0.37 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_gid_and_role_inclusive_exclusive_regular_expression[autoMembers_26-5788-5789-Intern-Contractors-SuffDef1-5] | 0.11 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_gid_and_role_inclusive_exclusive_regular_expression[autoMembers_25-9788-9789-Employee-Contractors-Managers-1] | 0.06 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_gid_and_role_inclusive_exclusive_regular_expression[autoMembers_24-1110-1111-Employee-Contractors-SuffDef1-5] | 0.11 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_gid_and_role_inclusive_exclusive_regular_expression[autoMembers_23-2788-2789-Contractor-Contractors-SuffDef1-5] | 0.09 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_gid_and_role_inclusive_exclusive_regular_expression[autoMembers_19-5788-5789-HRManager-Managers-SuffDef1-5] | 0.11 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_gid_and_role_inclusive_exclusive_regular_expression[autoMembers_18-6788-6789-Junior-Managers-SuffDef1-5] | 0.10 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_gid_and_role_inclusive_exclusive_regular_expression[autoMembers_17-562-563-Junior-Managers-SuffDef1-5] | 0.08 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_gid_and_role_inclusive_exclusive_regular_expression[autoMembers_16-6788-6789-Manager-Managers-SuffDef1-5] | 0.09 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_managers_contractors_exclusive_regex_rules_member_uid[autoMembers_32-555-720-Employee-SubDef1-SubDef3] | 0.10 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_managers_contractors_exclusive_regex_rules_member_uid[autoMembers_31-515-200-Junior-SubDef1-SubDef5] | 0.09 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_managers_contractors_exclusive_regex_rules_member_uid[autoMembers_30-999-400-Supervisor-SubDef1-SubDef2] | 0.10 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_managers_contractors_exclusive_regex_rules_member_uid[autoMembers_28-555-3663-ContractHR-Contractors,cn=subsuffGroups-Managers,cn=subsuffGroups] | 0.08 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_managers_inclusive_regex_rule[autoMembers_27-595-690-ContractHR-Managers-Contractors] | 0.08 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_managers_inclusive_regex_rule[autoMembers_29-8195-2753-Employee-Contractors-Managers] | 0.07 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_managers_inclusive_regex_rule[autoMembers_33-545-3333-Supervisor-Contractors-Managers] | 0.09 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_managers_inclusive_regex_rule[autoMembers_34-8195-693-Temporary-Managers-Contractors] | 0.08 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_reject_invalid_config_and_we_donot_deadlock_the_server | 9.42 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_automemtask_re_build_task | 11.21 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_automemtask_export_task | 10.33 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:tasks.py:1039 Automember Export Updates task (task-11092020_194009) completed successfully | |||
Passed | suites/automember_plugin/basic_test.py::test_automemtask_mapping | 2.67 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:tasks.py:1087 Automember Map Updates task (task-11092020_194012) completed successfully | |||
Passed | suites/automember_plugin/basic_test.py::test_automemtask_re_build | 11.31 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_automemtask_export | 14.01 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:tasks.py:1039 Automember Export Updates task (task-11092020_194035) completed successfully | |||
Passed | suites/automember_plugin/basic_test.py::test_automemtask_run_re_build | 21.37 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_automemtask_run_export | 16.14 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:tasks.py:1039 Automember Export Updates task (task-11092020_194116) completed successfully | |||
Passed | suites/automember_plugin/configuration_test.py::test_configuration | 4.98 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/backups/backup_test.py::test_missing_backend | 5.91 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/basic/basic_test.py::test_basic_ops | 0.23 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/basic/basic_test.py::test_basic_import_export | 64.22 | |
------------------------------Captured stderr call------------------------------ ldiffile: /var/lib/dirsrv/slapd-standalone1/ldif/export.ldif | |||
Passed | suites/basic/basic_test.py::test_basic_backup | 10.53 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:tasks.py:619 Backup task backup_11092020_194312 completed successfully [32mINFO [0m lib389:tasks.py:673 Restore task restore_11092020_194314 completed successfully | |||
Passed | suites/basic/basic_test.py::test_basic_db2index | 6.66 | |
------------------------------Captured stderr call------------------------------ [09/Nov/2020:19:43:25.434132554 -0500] - INFO - ldbm_instance_config_cachememsize_set - force a minimal value 512000 [09/Nov/2020:19:43:25.447582467 -0500] - INFO - check_and_set_import_cache - pagesize: 4096, available bytes 7575363584, process usage 22736896 [09/Nov/2020:19:43:25.453112599 -0500] - INFO - check_and_set_import_cache - Import allocates 2959126KB import cache. [09/Nov/2020:19:43:25.459777238 -0500] - INFO - bdb_copy_directory - Backing up file 0 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T19:43:25.245547/userRoot/givenName.db) [09/Nov/2020:19:43:25.467634005 -0500] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/givenName.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T19:43:25.245547/userRoot/givenName.db [09/Nov/2020:19:43:25.472315330 -0500] - INFO - bdb_copy_directory - Backing up file 1 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T19:43:25.245547/userRoot/aci.db) [09/Nov/2020:19:43:25.477588820 -0500] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/aci.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T19:43:25.245547/userRoot/aci.db [09/Nov/2020:19:43:25.483956733 -0500] - INFO - bdb_copy_directory - Backing up file 2 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T19:43:25.245547/userRoot/sn.db) [09/Nov/2020:19:43:25.493737797 -0500] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/sn.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T19:43:25.245547/userRoot/sn.db [09/Nov/2020:19:43:25.505422742 -0500] - INFO - bdb_copy_directory - Backing up file 3 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T19:43:25.245547/userRoot/numsubordinates.db) [09/Nov/2020:19:43:25.514673744 -0500] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/numsubordinates.db 
to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T19:43:25.245547/userRoot/numsubordinates.db [09/Nov/2020:19:43:25.520767306 -0500] - INFO - bdb_copy_directory - Backing up file 4 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T19:43:25.245547/userRoot/entryusn.db) [09/Nov/2020:19:43:25.528609019 -0500] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/entryusn.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T19:43:25.245547/userRoot/entryusn.db [09/Nov/2020:19:43:25.533616968 -0500] - INFO - bdb_copy_directory - Backing up file 5 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T19:43:25.245547/userRoot/nsuniqueid.db) [09/Nov/2020:19:43:25.538003528 -0500] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/nsuniqueid.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T19:43:25.245547/userRoot/nsuniqueid.db [09/Nov/2020:19:43:25.541835039 -0500] - INFO - bdb_copy_directory - Backing up file 6 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T19:43:25.245547/userRoot/ancestorid.db) [09/Nov/2020:19:43:25.551845355 -0500] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/ancestorid.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T19:43:25.245547/userRoot/ancestorid.db [09/Nov/2020:19:43:25.556795848 -0500] - INFO - bdb_copy_directory - Backing up file 7 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T19:43:25.245547/userRoot/parentid.db) [09/Nov/2020:19:43:25.561011246 -0500] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/parentid.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T19:43:25.245547/userRoot/parentid.db [09/Nov/2020:19:43:25.569574774 -0500] - INFO - bdb_copy_directory - Backing up file 8 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T19:43:25.245547/userRoot/mail.db) [09/Nov/2020:19:43:25.574846791 -0500] - INFO - 
dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/mail.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T19:43:25.245547/userRoot/mail.db [09/Nov/2020:19:43:25.580742167 -0500] - INFO - bdb_copy_directory - Backing up file 9 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T19:43:25.245547/userRoot/telephoneNumber.db) [09/Nov/2020:19:43:25.585343929 -0500] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/telephoneNumber.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T19:43:25.245547/userRoot/telephoneNumber.db [09/Nov/2020:19:43:25.590027634 -0500] - INFO - bdb_copy_directory - Backing up file 10 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T19:43:25.245547/userRoot/cn.db) [09/Nov/2020:19:43:25.595470446 -0500] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/cn.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T19:43:25.245547/userRoot/cn.db [09/Nov/2020:19:43:25.601567298 -0500] - INFO - bdb_copy_directory - Backing up file 11 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T19:43:25.245547/userRoot/DBVERSION) [09/Nov/2020:19:43:25.614872042 -0500] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/DBVERSION to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T19:43:25.245547/userRoot/DBVERSION [09/Nov/2020:19:43:25.620542820 -0500] - INFO - bdb_copy_directory - Backing up file 12 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T19:43:25.245547/userRoot/uid.db) [09/Nov/2020:19:43:25.631659991 -0500] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/uid.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T19:43:25.245547/userRoot/uid.db [09/Nov/2020:19:43:25.636263037 -0500] - INFO - bdb_copy_directory - Backing up file 13 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T19:43:25.245547/userRoot/entryrdn.db) 
[09/Nov/2020:19:43:25.641448265 -0500] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/entryrdn.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T19:43:25.245547/userRoot/entryrdn.db [09/Nov/2020:19:43:25.646661766 -0500] - INFO - bdb_copy_directory - Backing up file 14 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T19:43:25.245547/userRoot/uniquemember.db) [09/Nov/2020:19:43:25.651496459 -0500] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/uniquemember.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T19:43:25.245547/userRoot/uniquemember.db [09/Nov/2020:19:43:25.656278985 -0500] - INFO - bdb_copy_directory - Backing up file 15 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T19:43:25.245547/userRoot/id2entry.db) [09/Nov/2020:19:43:25.661213244 -0500] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/id2entry.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T19:43:25.245547/userRoot/id2entry.db [09/Nov/2020:19:43:25.667869198 -0500] - INFO - bdb_copy_directory - Backing up file 16 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T19:43:25.245547/userRoot/objectclass.db) [09/Nov/2020:19:43:25.675466201 -0500] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/objectclass.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T19:43:25.245547/userRoot/objectclass.db [09/Nov/2020:19:43:25.687325726 -0500] - INFO - upgradedb_core - userRoot: Start upgradedb. 
[09/Nov/2020:19:43:25.693409111 -0500] - INFO - bdb_instance_start - Import is running with nsslapd-db-private-import-mem on; No other process is allowed to access the database [09/Nov/2020:19:43:25.699270145 -0500] - INFO - check_and_set_import_cache - pagesize: 4096, available bytes 7574843392, process usage 23601152 [09/Nov/2020:19:43:25.702944225 -0500] - INFO - check_and_set_import_cache - Import allocates 2958923KB import cache. [09/Nov/2020:19:43:26.115822537 -0500] - INFO - bdb_import_main - reindex userRoot: Index buffering enabled with bucket size 100 [09/Nov/2020:19:43:26.825274404 -0500] - INFO - import_monitor_threads - reindex userRoot: Workers finished; cleaning up... [09/Nov/2020:19:43:27.032636896 -0500] - INFO - import_monitor_threads - reindex userRoot: Workers cleaned up. [09/Nov/2020:19:43:27.037423168 -0500] - INFO - bdb_import_main - reindex userRoot: Cleaning up producer thread... [09/Nov/2020:19:43:27.041202318 -0500] - INFO - bdb_import_main - reindex userRoot: Indexing complete. Post-processing... [09/Nov/2020:19:43:27.047379287 -0500] - INFO - bdb_import_main - reindex userRoot: Generating numsubordinates (this may take several minutes to complete)... [09/Nov/2020:19:43:27.052087850 -0500] - INFO - bdb_import_main - reindex userRoot: Generating numSubordinates complete. [09/Nov/2020:19:43:27.058727014 -0500] - INFO - bdb_get_nonleaf_ids - reindex userRoot: Gathering ancestorid non-leaf IDs... [09/Nov/2020:19:43:27.063420889 -0500] - INFO - bdb_get_nonleaf_ids - reindex userRoot: Finished gathering ancestorid non-leaf IDs. [09/Nov/2020:19:43:27.068058529 -0500] - INFO - ldbm_get_nonleaf_ids - reindex userRoot: Starting sort of ancestorid non-leaf IDs... [09/Nov/2020:19:43:27.072105063 -0500] - INFO - ldbm_get_nonleaf_ids - reindex userRoot: Finished sort of ancestorid non-leaf IDs. [09/Nov/2020:19:43:27.082778038 -0500] - INFO - bdb_ancestorid_new_idl_create_index - reindex userRoot: Creating ancestorid index (new idl)... 
[09/Nov/2020:19:43:27.088237437 -0500] - INFO - bdb_ancestorid_new_idl_create_index - reindex userRoot: Created ancestorid index (new idl). [09/Nov/2020:19:43:27.092780554 -0500] - INFO - bdb_import_main - reindex userRoot: Flushing caches... [09/Nov/2020:19:43:27.098261242 -0500] - INFO - bdb_import_main - reindex userRoot: Closing files... [09/Nov/2020:19:43:27.238703954 -0500] - INFO - bdb_import_main - reindex userRoot: Reindexing complete. Processed 160 entries in 1 seconds. (160.00 entries/sec) [09/Nov/2020:19:43:27.245333287 -0500] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/log.0000000001 to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T19:43:25.245547/log.0000000001 [09/Nov/2020:19:43:27.265206032 -0500] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/DBVERSION to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T19:43:25.245547/DBVERSION [09/Nov/2020:19:43:27.270919497 -0500] - INFO - bdb_pre_close - All database threads now stopped [09/Nov/2020:19:43:27.481694724 -0500] - INFO - slapd_exemode_db2index - Backend Instance: userRoot [09/Nov/2020:19:43:27.493237058 -0500] - INFO - ldbm_instance_config_cachememsize_set - force a minimal value 512000 [09/Nov/2020:19:43:27.521932767 -0500] - INFO - bdb_instance_start - Import is running with nsslapd-db-private-import-mem on; No other process is allowed to access the database [09/Nov/2020:19:43:27.526523144 -0500] - INFO - check_and_set_import_cache - pagesize: 4096, available bytes 7577214976, process usage 23146496 [09/Nov/2020:19:43:27.530870859 -0500] - INFO - check_and_set_import_cache - Import allocates 2959849KB import cache. 
[09/Nov/2020:19:43:27.795228873 -0500] - INFO - bdb_db2index - userRoot: Indexing attribute: uid [09/Nov/2020:19:43:27.800567216 -0500] - ERR - libdb - BDB1566 txn_checkpoint interface requires an environment configured for the transaction subsystem [09/Nov/2020:19:43:27.805080529 -0500] - ERR - bdb_force_checkpoint - Checkpoint FAILED, error Invalid argument (22) [09/Nov/2020:19:43:27.824585850 -0500] - INFO - bdb_db2index - userRoot: Finished indexing. [09/Nov/2020:19:43:27.858854834 -0500] - INFO - bdb_pre_close - All database threads now stopped | |||
Passed | suites/basic/basic_test.py::test_basic_acl | 0.48 | |
No log output captured. | |||
Passed | suites/basic/basic_test.py::test_basic_searches | 0.13 | |
No log output captured. | |||
Passed | suites/basic/basic_test.py::test_search_req_attrs[attrs0-cn-False] | 0.00 | |
No log output captured. | |||
Passed | suites/basic/basic_test.py::test_search_req_attrs[attrs1-cn-True] | 0.00 | |
No log output captured. | |||
Passed | suites/basic/basic_test.py::test_search_req_attrs[attrs2-nsUniqueId-True] | 0.00 | |
No log output captured. | |||
Passed | suites/basic/basic_test.py::test_search_req_attrs[attrs3-cn-True] | 0.00 | |
No log output captured. | |||
Passed | suites/basic/basic_test.py::test_search_req_attrs[attrs4-cn-True] | 0.01 | |
No log output captured. | |||
Passed | suites/basic/basic_test.py::test_basic_referrals | 4.32 | |
No log output captured. | |||
Passed | suites/basic/basic_test.py::test_basic_systemctl | 12.95 | |
No log output captured. | |||
Passed | suites/basic/basic_test.py::test_basic_ldapagent | 5.03 | |
No log output captured. | |||
Passed | suites/basic/basic_test.py::test_basic_dse_survives_kill9 | 11.82 | |
No log output captured. | |||
Passed | suites/basic/basic_test.py::test_def_rootdse_attr[namingContexts] | 0.03 | |
No log output captured. | |||
Passed | suites/basic/basic_test.py::test_def_rootdse_attr[supportedLDAPVersion] | 0.02 | |
No log output captured. | |||
Passed | suites/basic/basic_test.py::test_def_rootdse_attr[supportedControl] | 0.02 | |
No log output captured. | |||
Passed | suites/basic/basic_test.py::test_def_rootdse_attr[supportedExtension] | 0.02 | |
No log output captured. | |||
Passed | suites/basic/basic_test.py::test_def_rootdse_attr[supportedSASLMechanisms] | 0.14 | |
No log output captured. | |||
Passed | suites/basic/basic_test.py::test_def_rootdse_attr[vendorName] | 0.03 | |
No log output captured. | |||
Passed | suites/basic/basic_test.py::test_def_rootdse_attr[vendorVersion] | 0.03 | |
No log output captured. | |||
Passed | suites/basic/basic_test.py::test_mod_def_rootdse_attr[namingContexts] | 0.01 | |
No log output captured. | |||
Passed | suites/basic/basic_test.py::test_mod_def_rootdse_attr[supportedLDAPVersion] | 0.24 | |
No log output captured. | |||
Passed | suites/basic/basic_test.py::test_mod_def_rootdse_attr[supportedControl] | 0.00 | |
No log output captured. | |||
Passed | suites/basic/basic_test.py::test_mod_def_rootdse_attr[supportedExtension] | 0.01 | |
No log output captured. | |||
Passed | suites/basic/basic_test.py::test_mod_def_rootdse_attr[supportedSASLMechanisms] | 0.01 | |
No log output captured. | |||
Passed | suites/basic/basic_test.py::test_mod_def_rootdse_attr[vendorName] | 0.01 | |
No log output captured. | |||
Passed | suites/basic/basic_test.py::test_mod_def_rootdse_attr[vendorVersion] | 0.01 | |
No log output captured. | |||
Passed | suites/basic/basic_test.py::test_basic_anonymous_search | 0.05 | |
No log output captured. | |||
Passed | suites/basic/basic_test.py::test_search_original_type | 0.03 | |
No log output captured. | |||
Passed | suites/basic/basic_test.py::test_search_ou | 0.01 | |
No log output captured. | |||
Passed | suites/basic/basic_test.py::test_connection_buffer_size | 0.03 | |
No log output captured. | |||
Passed | suites/basic/basic_test.py::test_critical_msg_on_empty_range_idl | 5.90 | |
No log output captured. | |||
Passed | suites/basic/basic_test.py::test_ldbm_modification_audit_log | 13.65 | |
No log output captured. | |||
Passed | suites/basic/basic_test.py::test_dscreate | 15.22 | |
------------------------------Captured stdout call------------------------------ Starting installation... Completed installation for test_dscreate | |||
Passed | suites/basic/basic_test.py::test_dscreate_ldapi | 0.00 | |
-----------------------------Captured stdout setup------------------------------ Starting installation... Completed installation for test-longname-deadbeef-deadbeef-deadbeef-deadbeef-deadbeef -------------------------------Captured log call-------------------------------- [35mDEBUG [0m RootDSE:_mapped_object.py:635 get_attr_vals('supportedControl') | |||
Passed | suites/basic/basic_test.py::test_dscreate_multiple_dashes_name | 19.73 | |
-----------------------------Captured stdout setup------------------------------ Starting installation... Completed installation for test-longname-deadbeef-deadbeef-deadbeef-deadbeef-deadbeef | |||
Passed | suites/basic/basic_test.py::test_dscreate_with_different_rdn[c=uk] | 19.08 | |
------------------------------Captured stdout call------------------------------ Starting installation... Completed installation for test_different_rdn | |||
Passed | suites/basic/basic_test.py::test_dscreate_with_different_rdn[cn=test_user] | 20.95 | |
-----------------------------Captured stdout setup------------------------------ Removing instance ... Completed instance removal ------------------------------Captured stdout call------------------------------ Starting installation... Completed installation for test_different_rdn | |||
Passed | suites/basic/basic_test.py::test_dscreate_with_different_rdn[dc=example,dc=com] | 19.74 | |
-----------------------------Captured stdout setup------------------------------ Removing instance ... Completed instance removal ------------------------------Captured stdout call------------------------------ Starting installation... Completed installation for test_different_rdn | |||
Passed | suites/basic/basic_test.py::test_dscreate_with_different_rdn[o=south] | 19.21 | |
-----------------------------Captured stdout setup------------------------------ Removing instance ... Completed instance removal ------------------------------Captured stdout call------------------------------ Starting installation... Completed installation for test_different_rdn | |||
Passed | suites/basic/basic_test.py::test_dscreate_with_different_rdn[ou=sales] | 18.42 | |
-----------------------------Captured stdout setup------------------------------ Removing instance ... Completed instance removal ------------------------------Captured stdout call------------------------------ Starting installation... Completed installation for test_different_rdn | |||
Passed | suites/basic/basic_test.py::test_dscreate_with_different_rdn[wrong=some_value] | 18.29 | |
-----------------------------Captured stdout setup------------------------------ Removing instance ... Completed instance removal ------------------------------Captured stdout call------------------------------ Starting installation... Error: Instance creation failed! Suffix RDN 'wrong' in 'wrong=some_value' is not supported. Supported RDN's are: 'c', 'cn', 'dc', 'o', and 'ou' -------------------------------Captured log call-------------------------------- [31mCRITICAL[0m tests.suites.basic.basic_test:basic_test.py:1474 dscreate failed! Error (1) None | |||
Passed | suites/betxns/betxn_test.py::test_betxt_7bit | 5.37 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.betxns.betxn_test:betxn_test.py:52 Running test_betxt_7bit... [32mINFO [0m tests.suites.betxns.betxn_test:betxn_test.py:78 test_betxt_7bit: PASSED | |||
Passed | suites/betxns/betxn_test.py::test_betxn_attr_uniqueness | 4.51 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.betxns.betxn_test:betxn_test.py:133 test_betxn_attr_uniqueness: PASSED | |||
Passed | suites/betxns/betxn_test.py::test_betxn_memberof | 5.09 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.betxns.betxn_test:betxn_test.py:179 test_betxn_memberof: PASSED | |||
Passed | suites/betxns/betxn_test.py::test_betxn_modrdn_memberof_cache_corruption | 5.28 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.betxns.betxn_test:betxn_test.py:233 test_betxn_modrdn_memberof: PASSED | |||
Passed | suites/betxns/betxn_test.py::test_ri_and_mep_cache_corruption | 0.50 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.betxns.betxn_test:betxn_test.py:357 Test PASSED | |||
Passed | suites/clu/clu_test.py::test_clu_pwdhash | 0.06 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.clu.clu_test:clu_test.py:40 Running test_clu_pwdhash... [32mINFO [0m tests.suites.clu.clu_test:clu_test.py:54 pwdhash generated: {SSHA}2hJmEgk1RbdjbWXx6sHpPEQXzsRabWUKO8iZjA== [32mINFO [0m tests.suites.clu.clu_test:clu_test.py:55 test_clu_pwdhash: PASSED | |||
Passed | suites/clu/clu_test.py::test_clu_pwdhash_mod | 0.08 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.clu.clu_test:clu_test.py:78 Running test_clu_pwdhash_mod... [32mINFO [0m tests.suites.clu.clu_test:clu_test.py:87 pwdhash generated: {SSHA256}Kx0MibhTFGt+06ssqYywraSP9y760kpfk23A/BTGZ8kGw6OQKFu7/Q== [32mINFO [0m tests.suites.clu.clu_test:clu_test.py:88 returned the hashed string using the algorithm set in nsslapd-rootpwstoragescheme | |||
Passed | suites/clu/dbgen_test.py::test_dsconf_dbgen_users | 7.54 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:119 Run ldifgen to create users ldif [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:56 Generating LDIF with the following options: [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - suffix=dc=example,dc=com [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - parent=ou=people,dc=example,dc=com [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - number=1000 [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - rdn-cn=False [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - generic=True [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - start-idx=50 [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - localize=False [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:62 - ldif-file=/var/lib/dirsrv/slapd-standalone1/ldif/created.ldif [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:63 Writing LDIF ... [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:196 Successfully created LDIF file: /var/lib/dirsrv/slapd-standalone1/ldif/created.ldif [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:122 Check if file exists [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:67 Check if content is present in output [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:71 Reset log file for next test [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:127 Get number of accounts before import [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:48 Stopping the server and running offline import... 
[32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:133 Check that accounts are imported | |||
Passed | suites/clu/dbgen_test.py::test_dsconf_dbgen_groups | 47.06 | |
------------------------------Captured stderr call------------------------------ ldap_add: Already exists (68) ldap_add: Already exists (68) ldap_add: Already exists (68) -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:183 Run ldifgen to create group ldif [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:56 Generating LDIF with the following options: [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - NAME=myGroup [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - parent=ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - suffix=dc=example,dc=com [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - number=1 [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - num-members=1000 [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - create-members=True [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - member-attr=uniquemember [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - member-parent=ou=people,dc=example,dc=com [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:62 - ldif-file=/var/lib/dirsrv/slapd-standalone1/ldif/created.ldif [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:63 Writing LDIF ... 
[32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:250 Successfully created LDIF file: /var/lib/dirsrv/slapd-standalone1/ldif/created.ldif [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:186 Check if file exists [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:67 Check if content is present in output [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:71 Reset log file for next test [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:191 Get number of accounts before import [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:57 Add entries from ldif file with ldapmodify [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:200 Check that accounts are imported [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:203 Check that group is imported | |||
Passed | suites/clu/dbgen_test.py::test_dsconf_dbgen_cos_classic | 0.09 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:256 Run ldifgen to create COS definition ldif [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:56 Generating LDIF with the following options: [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - type=classic [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - NAME=My_Postal_Def [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - parent=ou=cos definitions,dc=example,dc=com [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - create-parent=True [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - cos-specifier=businessCategory [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - cos-attr=['postalcode', 'telephonenumber'] [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - cos-template=cn=sales,cn=classicCoS,dc=example,dc=com [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:62 - ldif-file=/var/lib/dirsrv/slapd-standalone1/ldif/created.ldif [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:63 Writing LDIF ... [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:304 Successfully created LDIF file: /var/lib/dirsrv/slapd-standalone1/ldif/created.ldif [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:259 Check if file exists [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:67 Check if content is present in output [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:71 Reset log file for next test [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:57 Add entries from ldif file with ldapmodify [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:267 Check that COS definition is imported | |||
Passed | suites/clu/dbgen_test.py::test_dsconf_dbgen_cos_pointer | 0.05 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:322 Run ldifgen to create COS definition ldif [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:56 Generating LDIF with the following options: [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - type=pointer [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - NAME=My_Postal_Def_pointer [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - parent=ou=cos pointer definitions,dc=example,dc=com [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - create-parent=True [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - cos-attr=['postalcode', 'telephonenumber'] [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - cos-template=cn=sales,cn=pointerCoS,dc=example,dc=com [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:62 - ldif-file=/var/lib/dirsrv/slapd-standalone1/ldif/created.ldif [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:63 Writing LDIF ... [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:304 Successfully created LDIF file: /var/lib/dirsrv/slapd-standalone1/ldif/created.ldif [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:325 Check if file exists [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:67 Check if content is present in output [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:71 Reset log file for next test [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:57 Add entries from ldif file with ldapmodify [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:333 Check that COS definition is imported | |||
Passed | suites/clu/dbgen_test.py::test_dsconf_dbgen_cos_indirect | 0.04 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:387 Run ldifgen to create COS definition ldif [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:56 Generating LDIF with the following options: [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - type=indirect [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - NAME=My_Postal_Def_indirect [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - parent=ou=cos indirect definitions,dc=example,dc=com [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - create-parent=True [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - cos-specifier=businessCategory [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - cos-attr=['postalcode', 'telephonenumber'] [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:62 - ldif-file=/var/lib/dirsrv/slapd-standalone1/ldif/created.ldif [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:63 Writing LDIF ... [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:304 Successfully created LDIF file: /var/lib/dirsrv/slapd-standalone1/ldif/created.ldif [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:390 Check if file exists [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:67 Check if content is present in output [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:71 Reset log file for next test [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:57 Add entries from ldif file with ldapmodify [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:398 Check that COS definition is imported | |||
Passed | suites/clu/dbgen_test.py::test_dsconf_dbgen_cos_template | 0.05 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:449 Run ldifgen to create COS template ldif [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:56 Generating LDIF with the following options: [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - NAME=My_Template [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - parent=ou=cos templates,dc=example,dc=com [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - create-parent=True [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - cos-priority=1 [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - cos-attr-val=postalcode:12345 [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:62 - ldif-file=/var/lib/dirsrv/slapd-standalone1/ldif/created.ldif [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:63 Writing LDIF ... [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:341 Successfully created LDIF file: /var/lib/dirsrv/slapd-standalone1/ldif/created.ldif [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:452 Check if file exists [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:67 Check if content is present in output [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:71 Reset log file for next test [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:57 Add entries from ldif file with ldapmodify [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:460 Check that COS template is imported | |||
Passed | suites/clu/dbgen_test.py::test_dsconf_dbgen_managed_role | 0.06 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:511 Run ldifgen to create managed role ldif [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:56 Generating LDIF with the following options: [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - NAME=My_Managed_Role [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - parent=ou=managed roles,dc=example,dc=com [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - create-parent=True [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - type=managed [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:62 - ldif-file=/var/lib/dirsrv/slapd-standalone1/ldif/created.ldif [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:63 Writing LDIF ... [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:391 Successfully created LDIF file: /var/lib/dirsrv/slapd-standalone1/ldif/created.ldif [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:514 Check if file exists [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:67 Check if content is present in output [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:71 Reset log file for next test [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:57 Add entries from ldif file with ldapmodify [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:522 Check that managed role is imported | |||
Passed | suites/clu/dbgen_test.py::test_dsconf_dbgen_filtered_role | 0.06 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:571 Run ldifgen to create filtered role ldif [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:56 Generating LDIF with the following options: [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - NAME=My_Filtered_Role [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - parent=ou=filtered roles,dc=example,dc=com [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - create-parent=True [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - type=filtered [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - filter="objectclass=posixAccount" [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:62 - ldif-file=/var/lib/dirsrv/slapd-standalone1/ldif/created.ldif [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:63 Writing LDIF ... [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:391 Successfully created LDIF file: /var/lib/dirsrv/slapd-standalone1/ldif/created.ldif [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:574 Check if file exists [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:67 Check if content is present in output [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:71 Reset log file for next test [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:57 Add entries from ldif file with ldapmodify [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:582 Check that filtered role is imported | |||
Passed | suites/clu/dbgen_test.py::test_dsconf_dbgen_nested_role | 0.04 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:632 Run ldifgen to create nested role ldif [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:56 Generating LDIF with the following options: [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - NAME=My_Nested_Role [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - parent=ou=nested roles,dc=example,dc=com [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - create-parent=True [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - type=nested [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - role-dn=['cn=some_role,ou=roles,dc=example,dc=com'] [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:62 - ldif-file=/var/lib/dirsrv/slapd-standalone1/ldif/created.ldif [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:63 Writing LDIF ... [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:391 Successfully created LDIF file: /var/lib/dirsrv/slapd-standalone1/ldif/created.ldif [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:635 Check if file exists [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:67 Check if content is present in output [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:71 Reset log file for next test [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:57 Add entries from ldif file with ldapmodify [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:643 Check that nested role is imported | |||
Passed | suites/clu/dbgen_test.py::test_dsconf_dbgen_mod_ldif_mixed | 53.32 | |
------------------------------Captured stderr call------------------------------ ldap_modify: Operation not allowed on RDN (67) ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional 
info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax 
(34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: 
Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldapmodify: extra lines at end (line 43453, entry "uid=user0999,dc=example,dc=com") -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:702 Run ldifgen to create modification ldif [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:56 Generating LDIF with the following options: [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - parent=dc=example,dc=com [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - create-users=True [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - delete-users=True [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - create-parent=False [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - num-users=1000 [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - add-users=100 [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - del-users=999 [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - modrdn-users=100 [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - mod-users=10 [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - mod-attrs=['cn', 'uid', 'sn'] [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - randomize=False [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:62 - ldif-file=/var/lib/dirsrv/slapd-standalone1/ldif/created.ldif [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:63 Writing LDIF ... 
[32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:467 Successfully created LDIF file: /var/lib/dirsrv/slapd-standalone1/ldif/created.ldif [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:705 Check if file exists [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:67 Check if content is present in output [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:71 Reset log file for next test [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:710 Get number of accounts before import [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:57 Add entries from ldif file with ldapmodify [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:719 Check that some accounts are imported | |||
Passed | suites/clu/dbgen_test.py::test_dsconf_dbgen_nested_ldif | 35.73 | |
------------------------------Captured stderr call------------------------------ ldap_add: Already exists (68) -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:759 Run ldifgen to create nested ldif [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:56 Generating LDIF with the following options: [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - suffix=dc=example,dc=com [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - node-limit=100 [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:61 - num-users=600 [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:62 - ldif-file=/var/lib/dirsrv/slapd-standalone1/ldif/created.ldif [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:63 Writing LDIF ... [32mINFO [0m tests.suites.clu.dbgen_test:dbgen.py:500 Successfully created nested LDIF file (/var/lib/dirsrv/slapd-standalone1/ldif/created.ldif) containing 6 nodes/subtrees [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:762 Check if file exists [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:67 Check if content is present in output [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:71 Reset log file for next test [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:767 Get number of accounts before import [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:57 Add entries from ldif file with ldapmodify [32mINFO [0m tests.suites.clu.dbgen_test:dbgen_test.py:779 Check that accounts are imported | |||
Passed | suites/clu/dbmon_test.py::test_dsconf_dbmon | 0.59 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.clu.dbmon_test:dbmon_test.py:164 Sanity check for syntax [32mINFO [0m LogCapture:monitor.py:247 DB Monitor Report: 2020-11-09 19:51:41 [32mINFO [0m LogCapture:monitor.py:248 -------------------------------------------------------- [32mINFO [0m LogCapture:monitor.py:249 Database Cache: [32mINFO [0m LogCapture:monitor.py:250 - Cache Hit Ratio: 100% [32mINFO [0m LogCapture:monitor.py:251 - Free Space: 486.95 MB [32mINFO [0m LogCapture:monitor.py:252 - Free Percentage: 100.0% [32mINFO [0m LogCapture:monitor.py:253 - RO Page Drops: 0 [32mINFO [0m LogCapture:monitor.py:254 - Pages In: 0 [32mINFO [0m LogCapture:monitor.py:255 - Pages Out: 0 [32mINFO [0m LogCapture:monitor.py:256 [32mINFO [0m LogCapture:monitor.py:257 Normalized DN Cache: [32mINFO [0m LogCapture:monitor.py:258 - Cache Hit Ratio: 72% [32mINFO [0m LogCapture:monitor.py:259 - Free Space: 19.99 MB [32mINFO [0m LogCapture:monitor.py:260 - Free Percentage: 99.9% [32mINFO [0m LogCapture:monitor.py:261 - DN Count: 71 [32mINFO [0m LogCapture:monitor.py:262 - Evictions: 0 [32mINFO [0m LogCapture:monitor.py:263 [32mINFO [0m LogCapture:monitor.py:264 Backends: [32mINFO [0m LogCapture:monitor.py:266 - dc=example,dc=com (userRoot): [32mINFO [0m LogCapture:monitor.py:267 - Entry Cache Hit Ratio: 40% [32mINFO [0m LogCapture:monitor.py:268 - Entry Cache Count: 5 [32mINFO [0m LogCapture:monitor.py:269 - Entry Cache Free Space: 1.31 GB [32mINFO [0m LogCapture:monitor.py:270 - Entry Cache Free Percentage: 
100.0% [32mINFO [0m LogCapture:monitor.py:271 - Entry Cache Average Size: 3.65 KB [32mINFO [0m LogCapture:monitor.py:272 - DN Cache Hit Ratio: 0% [32mINFO [0m LogCapture:monitor.py:273 - DN Cache Count: 5 [32mINFO [0m LogCapture:monitor.py:274 - DN Cache Free Space: 192.0 MB [32mINFO [0m LogCapture:monitor.py:275 - DN Cache Free Percentage: 100.0% [32mINFO [0m LogCapture:monitor.py:276 - DN Cache Average Size: 67.0 B [32mINFO [0m LogCapture:monitor.py:286 [32mINFO [0m tests.suites.clu.dbmon_test:dbmon_test.py:133 Clear the log [32mINFO [0m tests.suites.clu.dbmon_test:dbmon_test.py:171 Sanity check for --indexes output [32mINFO [0m LogCapture:monitor.py:247 DB Monitor Report: 2020-11-09 19:51:41 [32mINFO [0m LogCapture:monitor.py:248 -------------------------------------------------------- [32mINFO [0m LogCapture:monitor.py:249 Database Cache: [32mINFO [0m LogCapture:monitor.py:250 - Cache Hit Ratio: 100% [32mINFO [0m LogCapture:monitor.py:251 - Free Space: 486.95 MB [32mINFO [0m LogCapture:monitor.py:252 - Free Percentage: 100.0% [32mINFO [0m LogCapture:monitor.py:253 - RO Page Drops: 0 [32mINFO [0m LogCapture:monitor.py:254 - Pages In: 0 [32mINFO [0m LogCapture:monitor.py:255 - Pages Out: 0 [32mINFO [0m LogCapture:monitor.py:256 [32mINFO [0m LogCapture:monitor.py:257 Normalized DN Cache: [32mINFO [0m LogCapture:monitor.py:258 - Cache Hit Ratio: 72% [32mINFO [0m LogCapture:monitor.py:259 - Free Space: 19.99 MB [32mINFO [0m LogCapture:monitor.py:260 - Free Percentage: 99.9% [32mINFO [0m LogCapture:monitor.py:261 - DN Count: 71 [32mINFO [0m LogCapture:monitor.py:262 - Evictions: 0 [32mINFO [0m LogCapture:monitor.py:263 [32mINFO [0m LogCapture:monitor.py:264 Backends: [32mINFO [0m LogCapture:monitor.py:266 - dc=example,dc=com (userRoot): [32mINFO [0m LogCapture:monitor.py:267 - Entry Cache Hit Ratio: 40% [32mINFO [0m LogCapture:monitor.py:268 - Entry Cache Count: 5 [32mINFO [0m LogCapture:monitor.py:269 - Entry Cache Free Space: 1.31 GB [32mINFO [0m 
LogCapture:monitor.py:270 - Entry Cache Free Percentage: 100.0% [32mINFO [0m LogCapture:monitor.py:271 - Entry Cache Average Size: 3.65 KB [32mINFO [0m LogCapture:monitor.py:272 - DN Cache Hit Ratio: 0% [32mINFO [0m LogCapture:monitor.py:273 - DN Cache Count: 5 [32mINFO [0m LogCapture:monitor.py:274 - DN Cache Free Space: 192.0 MB [32mINFO [0m LogCapture:monitor.py:275 - DN Cache Free Percentage: 100.0% [32mINFO [0m LogCapture:monitor.py:276 - DN Cache Average Size: 67.0 B [32mINFO [0m LogCapture:monitor.py:278 - Indexes: [32mINFO [0m LogCapture:monitor.py:280 - Index: numsubordinates.db [32mINFO [0m LogCapture:monitor.py:281 - Cache Hit: 0 [32mINFO [0m LogCapture:monitor.py:282 - Cache Miss: 0 [32mINFO [0m LogCapture:monitor.py:283 - Page In: 0 [32mINFO [0m LogCapture:monitor.py:284 - Page Out: 0 [32mINFO [0m LogCapture:monitor.py:285 [32mINFO [0m LogCapture:monitor.py:280 - Index: nsuniqueid.db [32mINFO [0m LogCapture:monitor.py:281 - Cache Hit: 0 [32mINFO [0m LogCapture:monitor.py:282 - Cache Miss: 0 [32mINFO [0m LogCapture:monitor.py:283 - Page In: 0 [32mINFO [0m LogCapture:monitor.py:284 - Page Out: 0 [32mINFO [0m LogCapture:monitor.py:285 [32mINFO [0m LogCapture:monitor.py:280 - Index: id2entry.db [32mINFO [0m LogCapture:monitor.py:281 - Cache Hit: 8 [32mINFO [0m LogCapture:monitor.py:282 - Cache Miss: 0 [32mINFO [0m LogCapture:monitor.py:283 - Page In: 0 [32mINFO [0m LogCapture:monitor.py:284 - Page Out: 0 [32mINFO [0m LogCapture:monitor.py:285 [32mINFO [0m LogCapture:monitor.py:280 - Index: uid.db [32mINFO [0m LogCapture:monitor.py:281 - Cache Hit: 0 [32mINFO [0m LogCapture:monitor.py:282 - Cache Miss: 0 [32mINFO [0m LogCapture:monitor.py:283 - Page In: 0 [32mINFO [0m LogCapture:monitor.py:284 - Page Out: 0 [32mINFO [0m LogCapture:monitor.py:285 [32mINFO [0m LogCapture:monitor.py:280 - Index: objectclass.db [32mINFO [0m LogCapture:monitor.py:281 - Cache Hit: 14 [32mINFO [0m LogCapture:monitor.py:282 - Cache Miss: 0 [32mINFO [0m LogCapture:monitor.py:283 - 
Page In: 0 [32mINFO [0m LogCapture:monitor.py:284 - Page Out: 0 [32mINFO [0m LogCapture:monitor.py:285 [32mINFO [0m LogCapture:monitor.py:280 - Index: aci.db [32mINFO [0m LogCapture:monitor.py:281 - Cache Hit: 3 [32mINFO [0m LogCapture:monitor.py:282 - Cache Miss: 0 [32mINFO [0m LogCapture:monitor.py:283 - Page In: 0 [32mINFO [0m LogCapture:monitor.py:284 - Page Out: 0 [32mINFO [0m LogCapture:monitor.py:285 [32mINFO [0m LogCapture:monitor.py:280 - Index: entryrdn.db [32mINFO [0m LogCapture:monitor.py:281 - Cache Hit: 16 [32mINFO [0m LogCapture:monitor.py:282 - Cache Miss: 0 [32mINFO [0m LogCapture:monitor.py:283 - Page In: 0 [32mINFO [0m LogCapture:monitor.py:284 - Page Out: 0 [32mINFO [0m LogCapture:monitor.py:285 [32mINFO [0m LogCapture:monitor.py:280 - Index: cn.db [32mINFO [0m LogCapture:monitor.py:281 - Cache Hit: 0 [32mINFO [0m LogCapture:monitor.py:282 - Cache Miss: 0 [32mINFO [0m LogCapture:monitor.py:283 - Page In: 0 [32mINFO [0m LogCapture:monitor.py:284 - Page Out: 0 [32mINFO [0m LogCapture:monitor.py:285 [32mINFO [0m LogCapture:monitor.py:280 - Index: ancestorid.db [32mINFO [0m LogCapture:monitor.py:281 - Cache Hit: 0 [32mINFO [0m LogCapture:monitor.py:282 - Cache Miss: 0 [32mINFO [0m LogCapture:monitor.py:283 - Page In: 0 [32mINFO [0m LogCapture:monitor.py:284 - Page Out: 0 [32mINFO [0m LogCapture:monitor.py:285 [32mINFO [0m LogCapture:monitor.py:280 - Index: parentid.db [32mINFO [0m LogCapture:monitor.py:281 - Cache Hit: 0 [32mINFO [0m LogCapture:monitor.py:282 - Cache Miss: 0 [32mINFO [0m LogCapture:monitor.py:283 - Page In: 0 [32mINFO [0m LogCapture:monitor.py:284 - Page Out: 0 [32mINFO [0m LogCapture:monitor.py:285 [32mINFO [0m LogCapture:monitor.py:286 [32mINFO [0m tests.suites.clu.dbmon_test:dbmon_test.py:133 Clear the log [32mINFO [0m tests.suites.clu.dbmon_test:dbmon_test.py:179 Sanity check for --json output [32mINFO [0m LogCapture:monitor.py:245 { "date": "2020-11-09 19:51:41", "dbcache": { "hit_ratio": "100", "free": "486.95 MB", 
"free_percentage": "100.0", "roevicts": "0", "pagein": "0", "pageout": "0" }, "ndncache": { "hit_ratio": "72", "free": "19.99 MB", "free_percentage": "99.9", "count": "71", "evictions": "0" }, "backends": { "userRoot": { "suffix": "dc=example,dc=com", "entry_cache_count": "5", "entry_cache_free": "1.31 GB", "entry_cache_free_percentage": "100.0", "entry_cache_size": "3.65 KB", "entry_cache_hit_ratio": "40", "dn_cache_count": "5", "dn_cache_free": "192.0 MB", "dn_cache_free_percentage": "100.0", "dn_cache_size": "67.0 B", "dn_cache_hit_ratio": "0", "indexes": [ { "name": "numsubordinates.db", "cachehit": "0", "cachemiss": "0", "pagein": "0", "pageout": "0" }, { "name": "nsuniqueid.db", "cachehit": "0", "cachemiss": "0", "pagein": "0", "pageout": "0" }, { "name": "id2entry.db", "cachehit": "8", "cachemiss": "0", "pagein": "0", "pageout": "0" }, { "name": "uid.db", "cachehit": "0", "cachemiss": "0", "pagein": "0", "pageout": "0" }, { "name": "objectclass.db", "cachehit": "14", "cachemiss": "0", "pagein": "0", "pageout": "0" }, { "name": "aci.db", "cachehit": "3", "cachemiss": "0", "pagein": "0", "pageout": "0" }, { "name": "entryrdn.db", "cachehit": "16", "cachemiss": "0", "pagein": "0", "pageout": "0" }, { "name": "cn.db", "cachehit": "0", "cachemiss": "0", "pagein": "0", "pageout": "0" }, { "name": "ancestorid.db", "cachehit": "0", "cachemiss": "0", "pagein": "0", "pageout": "0" }, { "name": "parentid.db", "cachehit": "0", "cachemiss": "0", "pagein": "0", "pageout": "0" } ] } } } [32mINFO [0m tests.suites.clu.dbmon_test:dbmon_test.py:133 Clear the log | |||
Passed | suites/clu/dbverify_test.py::test_dsctl_dbverify | 2.42 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. ------------------------------Captured stderr call------------------------------ [09/Nov/2020:19:51:57.542342817 -0500] - INFO - ldbm_instance_config_cachememsize_set - force a minimal value 512000 -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.clu.dbverify_test:dbverify_test.py:63 Run dbverify [32mINFO [0m tests.suites.clu.dbverify_test:dbtasks.py:88 dbverify successful [32mINFO [0m tests.suites.clu.dbverify_test:dbverify_test.py:67 Check dbverify was successful | |||
Passed | suites/clu/dsidm_config_test.py::test_dsidm_config_sssd | 5.23 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. ------------------------------Captured stdout call------------------------------ # # sssd.conf # Generated by 389 Directory Server - dsidm # # For more details see man sssd.conf and man sssd-ldap # Be sure to review the content of this file to ensure it is secure and correct # in your environment. [domain/ldap] # Uncomment this for more verbose logging. # debug_level=3 # Cache hashes of user authentication for offline auth. cache_credentials = True id_provider = ldap auth_provider = ldap access_provider = ldap chpass_provider = ldap ldap_schema = rfc2307 ldap_search_base = dc=example,dc=com ldap_uri = ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:38901 # If you have DNS SRV records, you can use the following instead. This derives # from your ldap_search_base. # ldap_uri = _srv_ ldap_tls_reqcert = demand # To use cacert dir, place *.crt files in this path then run: # /usr/bin/openssl rehash /etc/openldap/certs # or (for older versions of openssl) # /usr/bin/c_rehash /etc/openldap/certs ldap_tls_cacertdir = /etc/openldap/certs # Path to the cacert # ldap_tls_cacert = /etc/openldap/certs/ca.crt # Only users who match this filter can login and authorise to this machine. Note # that users who do NOT match, will still have their uid/gid resolve, but they # can't login. 
# ldap_access_filter = (memberOf=<dn>) enumerate = false access_provider = ldap ldap_user_member_of = memberof ldap_user_gecos = cn ldap_user_uuid = nsUniqueId ldap_group_uuid = nsUniqueId # This is really important as it allows SSSD to respect nsAccountLock ldap_account_expire_policy = rhds ldap_access_order = filter, expire # Setup for ssh keys # Inside /etc/ssh/sshd_config add the lines: # AuthorizedKeysCommand /usr/bin/sss_ssh_authorizedkeys # AuthorizedKeysCommandUser nobody # You can test with the command: sss_ssh_authorizedkeys <username> ldap_user_ssh_public_key = nsSshPublicKey # This prevents an issue where the Directory is recursively walked on group # and user look ups. It makes the client faster and more responsive in almost # every scenario. ignore_group_members = False [sssd] services = nss, pam, ssh, sudo config_file_version = 2 domains = ldap [nss] homedir_substring = /home # # sssd.conf # Generated by 389 Directory Server - dsidm # # For more details see man sssd.conf and man sssd-ldap # Be sure to review the content of this file to ensure it is secure and correct # in your environment. [domain/ldap] # Uncomment this for more verbose logging. # debug_level=3 # Cache hashes of user authentication for offline auth. cache_credentials = True id_provider = ldap auth_provider = ldap access_provider = ldap chpass_provider = ldap ldap_schema = rfc2307bis ldap_search_base = dc=example,dc=com ldap_uri = ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:38901 # If you have DNS SRV records, you can use the following instead. This derives # from your ldap_search_base. 
# ldap_uri = _srv_ ldap_tls_reqcert = demand # To use cacert dir, place *.crt files in this path then run: # /usr/bin/openssl rehash /etc/openldap/certs # or (for older versions of openssl) # /usr/bin/c_rehash /etc/openldap/certs ldap_tls_cacertdir = /etc/openldap/certs # Path to the cacert # ldap_tls_cacert = /etc/openldap/certs/ca.crt # Only users who match this filter can login and authorise to this machine. Note # that users who do NOT match, will still have their uid/gid resolve, but they # can't login. ldap_access_filter = (memberOf=cn=new_group,ou=groups,dc=example,dc=com) enumerate = false access_provider = ldap ldap_user_member_of = memberof ldap_user_gecos = cn ldap_user_uuid = nsUniqueId ldap_group_uuid = nsUniqueId # This is really important as it allows SSSD to respect nsAccountLock ldap_account_expire_policy = rhds ldap_access_order = filter, expire # Setup for ssh keys # Inside /etc/ssh/sshd_config add the lines: # AuthorizedKeysCommand /usr/bin/sss_ssh_authorizedkeys # AuthorizedKeysCommandUser nobody # You can test with the command: sss_ssh_authorizedkeys <username> ldap_user_ssh_public_key = nsSshPublicKey # This prevents an issue where the Directory is recursively walked on group # and user look ups. It makes the client faster and more responsive in almost # every scenario. ignore_group_members = False [sssd] services = nss, pam, ssh, sudo config_file_version = 2 domains = ldap [nss] homedir_substring = /home -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.clu.dsidm_config_test:dsidm_config_test.py:101 Create sssd.conf content [35mDEBUG [0m tests.suites.clu.dsidm_config_test:client_config.py:114 # # sssd.conf # Generated by 389 Directory Server - dsidm # # For more details see man sssd.conf and man sssd-ldap # Be sure to review the content of this file to ensure it is secure and correct # in your environment. [domain/ldap] # Uncomment this for more verbose logging. 
# debug_level=3 # Cache hashes of user authentication for offline auth. cache_credentials = True id_provider = ldap auth_provider = ldap access_provider = ldap chpass_provider = ldap ldap_schema = rfc2307 ldap_search_base = dc=example,dc=com ldap_uri = ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:38901 # If you have DNS SRV records, you can use the following instead. This derives # from your ldap_search_base. # ldap_uri = _srv_ ldap_tls_reqcert = demand # To use cacert dir, place *.crt files in this path then run: # /usr/bin/openssl rehash /etc/openldap/certs # or (for older versions of openssl) # /usr/bin/c_rehash /etc/openldap/certs ldap_tls_cacertdir = /etc/openldap/certs # Path to the cacert # ldap_tls_cacert = /etc/openldap/certs/ca.crt # Only users who match this filter can login and authorise to this machine. Note # that users who do NOT match, will still have their uid/gid resolve, but they # can't login. # ldap_access_filter = (memberOf=<dn>) enumerate = false access_provider = ldap ldap_user_member_of = memberof ldap_user_gecos = cn ldap_user_uuid = nsUniqueId ldap_group_uuid = nsUniqueId # This is really important as it allows SSSD to respect nsAccountLock ldap_account_expire_policy = rhds ldap_access_order = filter, expire # Setup for ssh keys # Inside /etc/ssh/sshd_config add the lines: # AuthorizedKeysCommand /usr/bin/sss_ssh_authorizedkeys # AuthorizedKeysCommandUser nobody # You can test with the command: sss_ssh_authorizedkeys <username> ldap_user_ssh_public_key = nsSshPublicKey # This prevents an issue where the Directory is recursively walked on group # and user look ups. It makes the client faster and more responsive in almost # every scenario. 
ignore_group_members = False [sssd] services = nss, pam, ssh, sudo config_file_version = 2 domains = ldap [nss] homedir_substring = /home [32mINFO [0m tests.suites.clu.dsidm_config_test:dsidm_config_test.py:104 Check if config creation was successful [32mINFO [0m tests.suites.clu.dsidm_config_test:dsidm_config_test.py:51 Check if content is present in output [32mINFO [0m tests.suites.clu.dsidm_config_test:dsidm_config_test.py:56 Check if value is present in output [32mINFO [0m tests.suites.clu.dsidm_config_test:dsidm_config_test.py:59 Reset log file for next test [32mINFO [0m tests.suites.clu.dsidm_config_test:dsidm_config_test.py:107 Now we test allowed_group argument [32mINFO [0m tests.suites.clu.dsidm_config_test:dsidm_config_test.py:108 Enable MemberOf plugin [32mINFO [0m tests.suites.clu.dsidm_config_test:dsidm_config_test.py:113 Create test group [32mINFO [0m tests.suites.clu.dsidm_config_test:dsidm_config_test.py:118 Create sssd.conf content with allowed group [35mDEBUG [0m tests.suites.clu.dsidm_config_test:client_config.py:114 # # sssd.conf # Generated by 389 Directory Server - dsidm # # For more details see man sssd.conf and man sssd-ldap # Be sure to review the content of this file to ensure it is secure and correct # in your environment. [domain/ldap] # Uncomment this for more verbose logging. # debug_level=3 # Cache hashes of user authentication for offline auth. cache_credentials = True id_provider = ldap auth_provider = ldap access_provider = ldap chpass_provider = ldap ldap_schema = rfc2307bis ldap_search_base = dc=example,dc=com ldap_uri = ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:38901 # If you have DNS SRV records, you can use the following instead. This derives # from your ldap_search_base. 
# ldap_uri = _srv_ ldap_tls_reqcert = demand # To use cacert dir, place *.crt files in this path then run: # /usr/bin/openssl rehash /etc/openldap/certs # or (for older versions of openssl) # /usr/bin/c_rehash /etc/openldap/certs ldap_tls_cacertdir = /etc/openldap/certs # Path to the cacert # ldap_tls_cacert = /etc/openldap/certs/ca.crt # Only users who match this filter can login and authorise to this machine. Note # that users who do NOT match, will still have their uid/gid resolve, but they # can't login. ldap_access_filter = (memberOf=cn=new_group,ou=groups,dc=example,dc=com) enumerate = false access_provider = ldap ldap_user_member_of = memberof ldap_user_gecos = cn ldap_user_uuid = nsUniqueId ldap_group_uuid = nsUniqueId # This is really important as it allows SSSD to respect nsAccountLock ldap_account_expire_policy = rhds ldap_access_order = filter, expire # Setup for ssh keys # Inside /etc/ssh/sshd_config add the lines: # AuthorizedKeysCommand /usr/bin/sss_ssh_authorizedkeys # AuthorizedKeysCommandUser nobody # You can test with the command: sss_ssh_authorizedkeys <username> ldap_user_ssh_public_key = nsSshPublicKey # This prevents an issue where the Directory is recursively walked on group # and user look ups. It makes the client faster and more responsive in almost # every scenario. ignore_group_members = False [sssd] services = nss, pam, ssh, sudo config_file_version = 2 domains = ldap [nss] homedir_substring = /home [32mINFO [0m tests.suites.clu.dsidm_config_test:dsidm_config_test.py:123 Check if config creation was successful [32mINFO [0m tests.suites.clu.dsidm_config_test:dsidm_config_test.py:47 Check if content is present in output [32mINFO [0m tests.suites.clu.dsidm_config_test:dsidm_config_test.py:59 Reset log file for next test | |||
Passed | suites/clu/dsidm_config_test.py::test_dsidm_config_ldap | 0.00 | |
------------------------------Captured stdout call------------------------------ # # OpenLDAP client configuration # Generated by 389 Directory Server - dsidm # # See ldap.conf(5) for details # This file should be world readable but not world writable. BASE dc=example,dc=com # Remember to check this: you can have multiple uris on this line. You may have # multiple servers or load balancers in your environment. URI ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:38901 # If you have DNS SRV records you can use: # URI ldaps:///dc%3Dexample%2Cdc%3Dcom DEREF never # To use cacert dir, place *.crt files in this path then run: # /usr/bin/openssl rehash /etc/openldap/certs # or (for older versions of openssl) # /usr/bin/c_rehash /etc/openldap/certs TLS_CACERTDIR /etc/openldap/certs # TLS_CACERT /etc/openldap/certs/ca.crt -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.clu.dsidm_config_test:dsidm_config_test.py:151 Create ldap.conf content [35mDEBUG [0m tests.suites.clu.dsidm_config_test:client_config.py:155 # # OpenLDAP client configuration # Generated by 389 Directory Server - dsidm # # See ldap.conf(5) for details # This file should be world readable but not world writable. BASE dc=example,dc=com # Remember to check this: you can have multiple uris on this line. You may have # multiple servers or load balancers in your environment. 
URI ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:38901 # If you have DNS SRV records you can use: # URI ldaps:///dc%3Dexample%2Cdc%3Dcom DEREF never # To use cacert dir, place *.crt files in this path then run: # /usr/bin/openssl rehash /etc/openldap/certs # or (for older versions of openssl) # /usr/bin/c_rehash /etc/openldap/certs TLS_CACERTDIR /etc/openldap/certs # TLS_CACERT /etc/openldap/certs/ca.crt [32mINFO [0m tests.suites.clu.dsidm_config_test:dsidm_config_test.py:154 Check if config creation was successful [32mINFO [0m tests.suites.clu.dsidm_config_test:dsidm_config_test.py:51 Check if content is present in output [32mINFO [0m tests.suites.clu.dsidm_config_test:dsidm_config_test.py:59 Reset log file for next test | |||
Passed | suites/clu/dsidm_config_test.py::test_dsidm_config_display | 4.40 | |
------------------------------Captured stdout call------------------------------ # This is a generic list of LDAP client configuration parameters you may require # for connecting a client to this server. Some of them may or may not apply # to your application, so consult your application documentation for further # assistance. # # This program makes a number of assumptions about your data and configuration # which may not be correct. Be sure to check these values for your situation. ; ldap uri ; This is the uri of the server you will connect to and authenticate to. It ; must be a valid subjectAltName in the presented TLS certificate. Note that this ; is not an exhaustive list of your LDAP servers, and other applications in your ; network like load balancers may affect this. This is just what we derive from ; your current connection. ldap_uri = ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:38901 ; ldap dns discovery uri ; In some environments, you may have DNS SRV records such as ; "_ldap._tcp.<domain name>". If these are present in your dns server, you can ; use the following uri. ldap_uri = ldaps:///dc%3Dexample%2Cdc%3Dcom ; ca_cert ; To correctly use TLS, you require the valid CA cert that issued your LDAP TLS ; certificates. Sometimes a copy of this may be in your server instance as ca_cert = /etc/dirsrv/slapd-<instance>/ca.crt ; However that's not guaranteed. You can show the certs from the LDAP server ; by sshing to the server and running: certutil -L -d /etc/dirsrv/slapd-<instance>/ ; If you can identify the CA certificate name, you can then view it with: certutil -L -n <ca cert name> -a -d /etc/dirsrv/slapd-<instance>/ ; This should be a pem file you can use in your application's CA. ; Some applications don't require a ca certificate parameter, and will use the ; ca certificate from /etc/openldap/ldap.conf. You should configure ldap.conf ; in these cases. See the 'client_config ldap.conf' command in dsidm. 
; basedn ; The basedn is the root suffix where all searches will originate from for ; LDAP objects. basedn = dc=example,dc=com ; schema_type ; LDAP servers have different ways to structure their objects and group ; relationships. Legacy servers will use rfc2307, where as modern servers will ; use rfc2307bis (requires MemberOf plugin to be enabled). This is the schema ; setting of your directory based on your running configuration (if we can ; detect it). schema_type = rfc2307bis ; user/account basedn ; Some applications may optionally use a user/account basedn to limit searches ; in the directory. This can be for performance or security reasons. Generally ; you shouldn't need this, preferring to use groups and filters for access ; control. user_basedn = ou=people,dc=example,dc=com ; user filter ; This is an ldap filter that will return only user objects. Additionally some ; applications will template into the filter (similar to sql statements) or they ; will generate the filter based on attributes. We list a number of possible ; filters you might use, but you should customise this for your application. ; ; If you are using rfc2307bis, you can use this filter to provide authorisation ; support by adding filters such as: (memberOf=<groupdn>) user_filter = (&(objectclass=nsPerson)(objectclass=nsAccount)(objectclass=nsOrgPerson)(objectclass=posixAccount)) user_filter = (&(&(objectclass=nsPerson)(objectclass=nsAccount)(objectclass=nsOrgPerson)(objectclass=posixAccount))(|(uid=<PARAM>)(displayName=<PARAM>)(cn=<PARAM>))) ; group basedn ; Some applications may optionnaly use a group basedn to limit searches in the ; directory. This can be for performance or security reasons. Generally you ; shouldn't need this, preferring to use groups and filters for access control. group_basedn = ou=Groups,dc=example,dc=com ; group filter ; This is an ldap filter that will return only group objects. 
Additionally ; some applications will template into the filter (similar to sql statements) ; or they will generate the filter base on attributes. We list a number of ; possible filters you might use, but you should customise this for your ; application. group_filter = (&(objectclass=groupOfNames)) group_filter = (&(&(objectclass=groupOfNames))(|(cn=<PARAM>))) ; attribute mappings ; Due to the variety of schemas and attribute mappings in LDAP, there are ; different representations of attributes and values. This is a guess at ; the mappings that exist in your server, and what attributes you should ; configure and use. unique id = nsUniqueId user rdn = uid user identifier = uid group rdn = cn group member attribute = member # This is a generic list of LDAP client configuration parameters you may require # for connecting a client to this server. Some of them may or may not apply # to your application, so consult your application documentation for further # assistance. # # This program makes a number of assumptions about your data and configuration # which may not be correct. Be sure to check these values for your situation. ; ldap uri ; This is the uri of the server you will connect to and authenticate to. It ; must be a valid subjectAltName in the presented TLS certificate. Note that this ; is not an exhaustive list of your LDAP servers, and other applications in your ; network like load balancers may affect this. This is just what we derive from ; your current connection. ldap_uri = ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:38901 ; ldap dns discovery uri ; In some environments, you may have DNS SRV records such as ; "_ldap._tcp.<domain name>". If these are present in your dns server, you can ; use the following uri. ldap_uri = ldaps:///dc%3Dexample%2Cdc%3Dcom ; ca_cert ; To correctly use TLS, you require the valid CA cert that issued your LDAP TLS ; certificates. 
Sometimes a copy of this may be in your server instance as ca_cert = /etc/dirsrv/slapd-<instance>/ca.crt ; However that's not guaranteed. You can show the certs from the LDAP server ; by sshing to the server and running: certutil -L -d /etc/dirsrv/slapd-<instance>/ ; If you can identify the CA certificate name, you can then view it with: certutil -L -n <ca cert name> -a -d /etc/dirsrv/slapd-<instance>/ ; This should be a pem file you can use in your application's CA. ; Some applications don't require a ca certificate parameter, and will use the ; ca certificate from /etc/openldap/ldap.conf. You should configure ldap.conf ; in these cases. See the 'client_config ldap.conf' command in dsidm. ; basedn ; The basedn is the root suffix where all searches will originate from for ; LDAP objects. basedn = dc=example,dc=com ; schema_type ; LDAP servers have different ways to structure their objects and group ; relationships. Legacy servers will use rfc2307, where as modern servers will ; use rfc2307bis (requires MemberOf plugin to be enabled). This is the schema ; setting of your directory based on your running configuration (if we can ; detect it). schema_type = rfc2307bis ; user/account basedn ; Some applications may optionally use a user/account basedn to limit searches ; in the directory. This can be for performance or security reasons. Generally ; you shouldn't need this, preferring to use groups and filters for access ; control. user_basedn = ou=people,dc=example,dc=com ; user filter ; This is an ldap filter that will return only user objects. Additionally some ; applications will template into the filter (similar to sql statements) or they ; will generate the filter based on attributes. We list a number of possible ; filters you might use, but you should customise this for your application. 
; ; If you are using rfc2307bis, you can use this filter to provide authorisation ; support by adding filters such as: (memberOf=<groupdn>) user_filter = (&(objectclass=nsPerson)(objectclass=nsAccount)(objectclass=nsOrgPerson)(objectclass=posixAccount)) user_filter = (&(&(objectclass=nsPerson)(objectclass=nsAccount)(objectclass=nsOrgPerson)(objectclass=posixAccount))(|(uid=<PARAM>)(displayName=<PARAM>)(cn=<PARAM>))) ; group basedn ; Some applications may optionnaly use a group basedn to limit searches in the ; directory. This can be for performance or security reasons. Generally you ; shouldn't need this, preferring to use groups and filters for access control. group_basedn = ou=Groups,dc=example,dc=com ; group filter ; This is an ldap filter that will return only group objects. Additionally ; some applications will template into the filter (similar to sql statements) ; or they will generate the filter base on attributes. We list a number of ; possible filters you might use, but you should customise this for your ; application. group_filter = (&(objectclass=groupOfNames)) group_filter = (&(&(objectclass=groupOfNames))(|(cn=<PARAM>))) ; attribute mappings ; Due to the variety of schemas and attribute mappings in LDAP, there are ; different representations of attributes and values. This is a guess at ; the mappings that exist in your server, and what attributes you should ; configure and use. unique id = nsUniqueId user rdn = uid user identifier = uid group rdn = cn group member attribute = member -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.clu.dsidm_config_test:dsidm_config_test.py:195 Test dsidm display option [35mDEBUG [0m tests.suites.clu.dsidm_config_test:client_config.py:290 # This is a generic list of LDAP client configuration parameters you may require # for connecting a client to this server. 
Some of them may or may not apply # to your application, so consult your application documentation for further # assistance. # # This program makes a number of assumptions about your data and configuration # which may not be correct. Be sure to check these values for your situation. ; ldap uri ; This is the uri of the server you will connect to and authenticate to. It ; must be a valid subjectAltName in the presented TLS certificate. Note that this ; is not an exhaustive list of your LDAP servers, and other applications in your ; network like load balancers may affect this. This is just what we derive from ; your current connection. ldap_uri = ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:38901 ; ldap dns discovery uri ; In some environments, you may have DNS SRV records such as ; "_ldap._tcp.<domain name>". If these are present in your dns server, you can ; use the following uri. ldap_uri = ldaps:///dc%3Dexample%2Cdc%3Dcom ; ca_cert ; To correctly use TLS, you require the valid CA cert that issued your LDAP TLS ; certificates. Sometimes a copy of this may be in your server instance as ca_cert = /etc/dirsrv/slapd-<instance>/ca.crt ; However that's not guaranteed. You can show the certs from the LDAP server ; by sshing to the server and running: certutil -L -d /etc/dirsrv/slapd-<instance>/ ; If you can identify the CA certificate name, you can then view it with: certutil -L -n <ca cert name> -a -d /etc/dirsrv/slapd-<instance>/ ; This should be a pem file you can use in your application's CA. ; Some applications don't require a ca certificate parameter, and will use the ; ca certificate from /etc/openldap/ldap.conf. You should configure ldap.conf ; in these cases. See the 'client_config ldap.conf' command in dsidm. ; basedn ; The basedn is the root suffix where all searches will originate from for ; LDAP objects. basedn = dc=example,dc=com ; schema_type ; LDAP servers have different ways to structure their objects and group ; relationships. 
Legacy servers will use rfc2307, where as modern servers will ; use rfc2307bis (requires MemberOf plugin to be enabled). This is the schema ; setting of your directory based on your running configuration (if we can ; detect it). schema_type = rfc2307bis ; user/account basedn ; Some applications may optionally use a user/account basedn to limit searches ; in the directory. This can be for performance or security reasons. Generally ; you shouldn't need this, preferring to use groups and filters for access ; control. user_basedn = ou=people,dc=example,dc=com ; user filter ; This is an ldap filter that will return only user objects. Additionally some ; applications will template into the filter (similar to sql statements) or they ; will generate the filter based on attributes. We list a number of possible ; filters you might use, but you should customise this for your application. ; ; If you are using rfc2307bis, you can use this filter to provide authorisation ; support by adding filters such as: (memberOf=<groupdn>) user_filter = (&(objectclass=nsPerson)(objectclass=nsAccount)(objectclass=nsOrgPerson)(objectclass=posixAccount)) user_filter = (&(&(objectclass=nsPerson)(objectclass=nsAccount)(objectclass=nsOrgPerson)(objectclass=posixAccount))(|(uid=<PARAM>)(displayName=<PARAM>)(cn=<PARAM>))) ; group basedn ; Some applications may optionnaly use a group basedn to limit searches in the ; directory. This can be for performance or security reasons. Generally you ; shouldn't need this, preferring to use groups and filters for access control. group_basedn = ou=Groups,dc=example,dc=com ; group filter ; This is an ldap filter that will return only group objects. Additionally ; some applications will template into the filter (similar to sql statements) ; or they will generate the filter base on attributes. We list a number of ; possible filters you might use, but you should customise this for your ; application. 
group_filter = (&(objectclass=groupOfNames)) group_filter = (&(&(objectclass=groupOfNames))(|(cn=<PARAM>))) ; attribute mappings ; Due to the variety of schemas and attribute mappings in LDAP, there are ; different representations of attributes and values. This is a guess at ; the mappings that exist in your server, and what attributes you should ; configure and use. unique id = nsUniqueId user rdn = uid user identifier = uid group rdn = cn group member attribute = member [32mINFO [0m tests.suites.clu.dsidm_config_test:dsidm_config_test.py:198 Check if display option was successful [32mINFO [0m tests.suites.clu.dsidm_config_test:dsidm_config_test.py:51 Check if content is present in output [32mINFO [0m tests.suites.clu.dsidm_config_test:dsidm_config_test.py:56 Check if value is present in output [32mINFO [0m tests.suites.clu.dsidm_config_test:dsidm_config_test.py:59 Reset log file for next test [32mINFO [0m tests.suites.clu.dsidm_config_test:dsidm_config_test.py:201 Enable MemberOf plugin [32mINFO [0m tests.suites.clu.dsidm_config_test:dsidm_config_test.py:206 Test dsidm display option with MemberOf plugin [35mDEBUG [0m tests.suites.clu.dsidm_config_test:client_config.py:290 # This is a generic list of LDAP client configuration parameters you may require # for connecting a client to this server. Some of them may or may not apply # to your application, so consult your application documentation for further # assistance. # # This program makes a number of assumptions about your data and configuration # which may not be correct. Be sure to check these values for your situation. ; ldap uri ; This is the uri of the server you will connect to and authenticate to. It ; must be a valid subjectAltName in the presented TLS certificate. Note that this ; is not an exhaustive list of your LDAP servers, and other applications in your ; network like load balancers may affect this. This is just what we derive from ; your current connection. 
ldap_uri = ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:38901 ; ldap dns discovery uri ; In some environments, you may have DNS SRV records such as ; "_ldap._tcp.<domain name>". If these are present in your dns server, you can ; use the following uri. ldap_uri = ldaps:///dc%3Dexample%2Cdc%3Dcom ; ca_cert ; To correctly use TLS, you require the valid CA cert that issued your LDAP TLS ; certificates. Sometimes a copy of this may be in your server instance as ca_cert = /etc/dirsrv/slapd-<instance>/ca.crt ; However that's not guaranteed. You can show the certs from the LDAP server ; by sshing to the server and running: certutil -L -d /etc/dirsrv/slapd-<instance>/ ; If you can identify the CA certificate name, you can then view it with: certutil -L -n <ca cert name> -a -d /etc/dirsrv/slapd-<instance>/ ; This should be a pem file you can use in your application's CA. ; Some applications don't require a ca certificate parameter, and will use the ; ca certificate from /etc/openldap/ldap.conf. You should configure ldap.conf ; in these cases. See the 'client_config ldap.conf' command in dsidm. ; basedn ; The basedn is the root suffix where all searches will originate from for ; LDAP objects. basedn = dc=example,dc=com ; schema_type ; LDAP servers have different ways to structure their objects and group ; relationships. Legacy servers will use rfc2307, where as modern servers will ; use rfc2307bis (requires MemberOf plugin to be enabled). This is the schema ; setting of your directory based on your running configuration (if we can ; detect it). schema_type = rfc2307bis ; user/account basedn ; Some applications may optionally use a user/account basedn to limit searches ; in the directory. This can be for performance or security reasons. Generally ; you shouldn't need this, preferring to use groups and filters for access ; control. user_basedn = ou=people,dc=example,dc=com ; user filter ; This is an ldap filter that will return only user objects. 
Additionally some ; applications will template into the filter (similar to sql statements) or they ; will generate the filter based on attributes. We list a number of possible ; filters you might use, but you should customise this for your application. ; ; If you are using rfc2307bis, you can use this filter to provide authorisation ; support by adding filters such as: (memberOf=<groupdn>) user_filter = (&(objectclass=nsPerson)(objectclass=nsAccount)(objectclass=nsOrgPerson)(objectclass=posixAccount)) user_filter = (&(&(objectclass=nsPerson)(objectclass=nsAccount)(objectclass=nsOrgPerson)(objectclass=posixAccount))(|(uid=<PARAM>)(displayName=<PARAM>)(cn=<PARAM>))) ; group basedn ; Some applications may optionnaly use a group basedn to limit searches in the ; directory. This can be for performance or security reasons. Generally you ; shouldn't need this, preferring to use groups and filters for access control. group_basedn = ou=Groups,dc=example,dc=com ; group filter ; This is an ldap filter that will return only group objects. Additionally ; some applications will template into the filter (similar to sql statements) ; or they will generate the filter base on attributes. We list a number of ; possible filters you might use, but you should customise this for your ; application. group_filter = (&(objectclass=groupOfNames)) group_filter = (&(&(objectclass=groupOfNames))(|(cn=<PARAM>))) ; attribute mappings ; Due to the variety of schemas and attribute mappings in LDAP, there are ; different representations of attributes and values. This is a guess at ; the mappings that exist in your server, and what attributes you should ; configure and use. 
unique id = nsUniqueId user rdn = uid user identifier = uid group rdn = cn group member attribute = member [32mINFO [0m tests.suites.clu.dsidm_config_test:dsidm_config_test.py:209 Check if display option was successful with MemberOf plugin enabled [32mINFO [0m tests.suites.clu.dsidm_config_test:dsidm_config_test.py:51 Check if content is present in output [32mINFO [0m tests.suites.clu.dsidm_config_test:dsidm_config_test.py:56 Check if value is present in output [32mINFO [0m tests.suites.clu.dsidm_config_test:dsidm_config_test.py:59 Reset log file for next test | |||
Passed | suites/clu/fixup_test.py::test_posix_winsync_fixup | 7.64 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.clu.fixup_test:fixup_test.py:73 Enable POSIXWinsyncPlugin [32mINFO [0m tests.suites.clu.fixup_test:fixup_test.py:77 Stopping the server and importing posix accounts [32mINFO [0m tests.suites.clu.fixup_test:fixup_test.py:87 Run Fixup task [32mINFO [0m tests.suites.clu.fixup_test:posix_winsync.py:29 Attempting to add task entry... [32mINFO [0m tests.suites.clu.fixup_test:posix_winsync.py:39 Successfully added task entry [32mINFO [0m tests.suites.clu.fixup_test:fixup_test.py:90 Check log if fixup task was successful | |||
Passed | suites/config/autotuning_test.py::test_threads_basic | 0.12 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:39 Set nsslapd-threadnumber: -1 to enable autotuning [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:42 Assert nsslapd-threadnumber is equal to the documented expected value | |||
Passed | suites/config/autotuning_test.py::test_threads_warning | 1.37 | |
No log output captured. | |||
Passed | suites/config/autotuning_test.py::test_threads_invalid_value[-2] | 0.02 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:87 Set nsslapd-threadnumber: -2. Operation should fail | |||
Passed | suites/config/autotuning_test.py::test_threads_invalid_value[0] | 0.03 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:87 Set nsslapd-threadnumber: 0. Operation should fail | |||
Passed | suites/config/autotuning_test.py::test_threads_invalid_value[invalid] | 0.02 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:87 Set nsslapd-threadnumber: invalid. Operation should fail | |||
Passed | suites/config/autotuning_test.py::test_threads_back_from_manual_value | 0.32 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:109 Set nsslapd-threadnumber: -1 to enable autotuning and save the new value [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:113 Set nsslapd-threadnumber to the autotuned value decreased by 2 [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:118 Set nsslapd-threadnumber: -1 to enable autotuning [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:121 Assert nsslapd-threadnumber is back to the autotuned value | |||
Passed | suites/config/autotuning_test.py::test_cache_autosize_non_zero[-] | 4.96 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:169 Check nsslapd-dbcachesize and nsslapd-cachememsize before the test [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:170 nsslapd-dbcachesize == b'408620032' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:171 nsslapd-cachememsize == b'1409286144' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:172 nsslapd-dncachememsize == b'201326592' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:173 nsslapd-cache-autosize == b'25' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:174 nsslapd-cache-autosize-split == b'25' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:180 Delete nsslapd-cache-autosize [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:190 Delete nsslapd-cache-autosize-split [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:196 Trying to set nsslapd-cachememsize to 33333333 [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:199 Trying to set nsslapd-dbcachesize to 33333333 [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:216 Check nsslapd-dbcachesize and nsslapd-cachememsize in the appropriate range. [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:217 nsslapd-dbcachesize == b'408620032' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:218 nsslapd-cachememsize == b'1409286144' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:219 nsslapd-dncachememsize == b'201326592' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:220 nsslapd-cache-autosize == b'25' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:221 nsslapd-cache-autosize-split == b'25' | |||
Passed | suites/config/autotuning_test.py::test_cache_autosize_non_zero[-0] | 4.77 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:169 Check nsslapd-dbcachesize and nsslapd-cachememsize before the test [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:170 nsslapd-dbcachesize == b'408620032' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:171 nsslapd-cachememsize == b'1409286144' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:172 nsslapd-dncachememsize == b'201326592' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:173 nsslapd-cache-autosize == b'25' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:174 nsslapd-cache-autosize-split == b'25' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:180 Delete nsslapd-cache-autosize [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:187 Set nsslapd-cache-autosize-split to 0 [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:196 Trying to set nsslapd-cachememsize to 33333333 [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:199 Trying to set nsslapd-dbcachesize to 33333333 [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:216 Check nsslapd-dbcachesize and nsslapd-cachememsize in the appropriate range. [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:217 nsslapd-dbcachesize == b'408620032' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:218 nsslapd-cachememsize == b'1409286144' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:219 nsslapd-dncachememsize == b'201326592' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:220 nsslapd-cache-autosize == b'25' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:221 nsslapd-cache-autosize-split == b'0' | |||
Passed | suites/config/autotuning_test.py::test_cache_autosize_non_zero[10-400] | 5.13 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:169 Check nsslapd-dbcachesize and nsslapd-cachememsize before the test [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:170 nsslapd-dbcachesize == b'408620032' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:171 nsslapd-cachememsize == b'1409286144' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:172 nsslapd-dncachememsize == b'201326592' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:173 nsslapd-cache-autosize == b'25' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:174 nsslapd-cache-autosize-split == b'0' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:177 Set nsslapd-cache-autosize to 10 [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:187 Set nsslapd-cache-autosize-split to 40 [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:196 Trying to set nsslapd-cachememsize to 33333333 [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:199 Trying to set nsslapd-dbcachesize to 33333333 [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:216 Check nsslapd-dbcachesize and nsslapd-cachememsize in the appropriate range. [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:217 nsslapd-dbcachesize == b'261516820' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:218 nsslapd-cachememsize == b'469762048' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:219 nsslapd-dncachememsize == b'67108864' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:220 nsslapd-cache-autosize == b'10' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:221 nsslapd-cache-autosize-split == b'40' | |||
Passed | suites/config/autotuning_test.py::test_cache_autosize_non_zero[-40] | 5.75 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:169 Check nsslapd-dbcachesize and nsslapd-cachememsize before the test [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:170 nsslapd-dbcachesize == b'261516820' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:171 nsslapd-cachememsize == b'469762048' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:172 nsslapd-dncachememsize == b'67108864' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:173 nsslapd-cache-autosize == b'10' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:174 nsslapd-cache-autosize-split == b'40' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:180 Delete nsslapd-cache-autosize [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:187 Set nsslapd-cache-autosize-split to 40 [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:196 Trying to set nsslapd-cachememsize to 33333333 [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:199 Trying to set nsslapd-dbcachesize to 33333333 [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:216 Check nsslapd-dbcachesize and nsslapd-cachememsize in the appropriate range. [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:217 nsslapd-dbcachesize == b'817240064' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:218 nsslapd-cachememsize == b'1140850688' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:219 nsslapd-dncachememsize == b'134217728' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:220 nsslapd-cache-autosize == b'25' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:221 nsslapd-cache-autosize-split == b'40' | |||
Passed | suites/config/autotuning_test.py::test_cache_autosize_non_zero[10-] | 5.65 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:169 Check nsslapd-dbcachesize and nsslapd-cachememsize before the test [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:170 nsslapd-dbcachesize == b'817240064' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:171 nsslapd-cachememsize == b'1140850688' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:172 nsslapd-dncachememsize == b'134217728' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:173 nsslapd-cache-autosize == b'25' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:174 nsslapd-cache-autosize-split == b'40' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:177 Set nsslapd-cache-autosize to 10 [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:190 Delete nsslapd-cache-autosize-split [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:196 Trying to set nsslapd-cachememsize to 33333333 [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:199 Trying to set nsslapd-dbcachesize to 33333333 [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:216 Check nsslapd-dbcachesize and nsslapd-cachememsize in the appropriate range. [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:217 nsslapd-dbcachesize == b'163448012' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:218 nsslapd-cachememsize == b'603979776' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:219 nsslapd-dncachememsize == b'67108864' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:220 nsslapd-cache-autosize == b'10' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:221 nsslapd-cache-autosize-split == b'25' | |||
Passed | suites/config/autotuning_test.py::test_cache_autosize_non_zero[10-401] | 4.90 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:169 Check nsslapd-dbcachesize and nsslapd-cachememsize before the test [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:170 nsslapd-dbcachesize == b'163448012' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:171 nsslapd-cachememsize == b'603979776' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:172 nsslapd-dncachememsize == b'67108864' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:173 nsslapd-cache-autosize == b'10' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:174 nsslapd-cache-autosize-split == b'25' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:177 Set nsslapd-cache-autosize to 10 [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:187 Set nsslapd-cache-autosize-split to 40 [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:196 Trying to set nsslapd-cachememsize to 33333333 [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:199 Trying to set nsslapd-dbcachesize to 33333333 [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:216 Check nsslapd-dbcachesize and nsslapd-cachememsize in the appropriate range. [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:217 nsslapd-dbcachesize == b'261516820' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:218 nsslapd-cachememsize == b'469762048' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:219 nsslapd-dncachememsize == b'67108864' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:220 nsslapd-cache-autosize == b'10' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:221 nsslapd-cache-autosize-split == b'40' | |||
Passed | suites/config/autotuning_test.py::test_cache_autosize_non_zero[10-0] | 5.19 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:169 Check nsslapd-dbcachesize and nsslapd-cachememsize before the test [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:170 nsslapd-dbcachesize == b'261516820' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:171 nsslapd-cachememsize == b'469762048' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:172 nsslapd-dncachememsize == b'67108864' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:173 nsslapd-cache-autosize == b'10' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:174 nsslapd-cache-autosize-split == b'40' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:177 Set nsslapd-cache-autosize to 10 [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:187 Set nsslapd-cache-autosize-split to 0 [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:196 Trying to set nsslapd-cachememsize to 33333333 [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:199 Trying to set nsslapd-dbcachesize to 33333333 [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:216 Check nsslapd-dbcachesize and nsslapd-cachememsize in the appropriate range. [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:217 nsslapd-dbcachesize == b'163448012' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:218 nsslapd-cachememsize == b'603979776' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:219 nsslapd-dncachememsize == b'67108864' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:220 nsslapd-cache-autosize == b'10' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:221 nsslapd-cache-autosize-split == b'0' | |||
Passed | suites/config/autotuning_test.py::test_cache_autosize_basic_sane[0] | 9.91 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:273 Check nsslapd-dbcachesize and nsslapd-cachememsize before the test [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:274 nsslapd-dbcachesize == b'163448012' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:275 nsslapd-cachememsize == b'603979776' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:276 nsslapd-cache-autosize == b'0' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:277 nsslapd-cache-autosize-split == b'0' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:280 Set nsslapd-cache-autosize-split to 0 [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:289 Set nsslapd-dbcachesize to 0 [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:291 Set nsslapd-cachememsize to 0 [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:307 Check nsslapd-dbcachesize and nsslapd-cachememsize in the appropriate range. 
[32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:308 nsslapd-dbcachesize == b'408620032' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:309 nsslapd-cachememsize == b'1409286144' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:310 nsslapd-dncachememsize == b'67108864' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:311 nsslapd-cache-autosize == b'0' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:312 nsslapd-cache-autosize-split == b'0' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:273 Check nsslapd-dbcachesize and nsslapd-cachememsize before the test [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:274 nsslapd-dbcachesize == b'408620032' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:275 nsslapd-cachememsize == b'1409286144' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:276 nsslapd-cache-autosize == b'0' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:277 nsslapd-cache-autosize-split == b'0' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:280 Set nsslapd-cache-autosize-split to 0 [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:289 Set nsslapd-dbcachesize to 33333333 [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:291 Set nsslapd-cachememsize to 33333333 [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:307 Check nsslapd-dbcachesize and nsslapd-cachememsize in the appropriate range. 
[32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:308 nsslapd-dbcachesize == b'33333333' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:309 nsslapd-cachememsize == b'33333333' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:310 nsslapd-dncachememsize == b'67108864' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:311 nsslapd-cache-autosize == b'0' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:312 nsslapd-cache-autosize-split == b'0' | |||
Passed | suites/config/autotuning_test.py::test_cache_autosize_basic_sane[] | 10.16 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:273 Check nsslapd-dbcachesize and nsslapd-cachememsize before the test [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:274 nsslapd-dbcachesize == b'33333333' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:275 nsslapd-cachememsize == b'33333333' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:276 nsslapd-cache-autosize == b'0' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:277 nsslapd-cache-autosize-split == b'0' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:283 Delete nsslapd-cache-autosize-split [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:289 Set nsslapd-dbcachesize to 0 [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:291 Set nsslapd-cachememsize to 0 [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:307 Check nsslapd-dbcachesize and nsslapd-cachememsize in the appropriate range. 
[32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:308 nsslapd-dbcachesize == b'408620032' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:309 nsslapd-cachememsize == b'1409286144' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:310 nsslapd-dncachememsize == b'67108864' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:311 nsslapd-cache-autosize == b'0' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:312 nsslapd-cache-autosize-split == b'25' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:273 Check nsslapd-dbcachesize and nsslapd-cachememsize before the test [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:274 nsslapd-dbcachesize == b'408620032' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:275 nsslapd-cachememsize == b'1409286144' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:276 nsslapd-cache-autosize == b'0' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:277 nsslapd-cache-autosize-split == b'25' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:283 Delete nsslapd-cache-autosize-split [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:289 Set nsslapd-dbcachesize to 33333333 [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:291 Set nsslapd-cachememsize to 33333333 [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:307 Check nsslapd-dbcachesize and nsslapd-cachememsize in the appropriate range. 
[32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:308 nsslapd-dbcachesize == b'33333333' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:309 nsslapd-cachememsize == b'33333333' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:310 nsslapd-dncachememsize == b'67108864' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:311 nsslapd-cache-autosize == b'0' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:312 nsslapd-cache-autosize-split == b'25' | |||
Passed | suites/config/autotuning_test.py::test_cache_autosize_basic_sane[40] | 10.65 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:273 Check nsslapd-dbcachesize and nsslapd-cachememsize before the test [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:274 nsslapd-dbcachesize == b'33333333' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:275 nsslapd-cachememsize == b'33333333' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:276 nsslapd-cache-autosize == b'0' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:277 nsslapd-cache-autosize-split == b'25' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:280 Set nsslapd-cache-autosize-split to 40 [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:289 Set nsslapd-dbcachesize to 0 [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:291 Set nsslapd-cachememsize to 0 [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:307 Check nsslapd-dbcachesize and nsslapd-cachememsize in the appropriate range. 
[32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:308 nsslapd-dbcachesize == b'817240064' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:309 nsslapd-cachememsize == b'1140850688' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:310 nsslapd-dncachememsize == b'67108864' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:311 nsslapd-cache-autosize == b'0' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:312 nsslapd-cache-autosize-split == b'40' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:273 Check nsslapd-dbcachesize and nsslapd-cachememsize before the test [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:274 nsslapd-dbcachesize == b'817240064' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:275 nsslapd-cachememsize == b'1140850688' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:276 nsslapd-cache-autosize == b'0' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:277 nsslapd-cache-autosize-split == b'40' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:280 Set nsslapd-cache-autosize-split to 40 [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:289 Set nsslapd-dbcachesize to 33333333 [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:291 Set nsslapd-cachememsize to 33333333 [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:307 Check nsslapd-dbcachesize and nsslapd-cachememsize in the appropriate range. 
[32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:308 nsslapd-dbcachesize == b'33333333' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:309 nsslapd-cachememsize == b'33333333' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:310 nsslapd-dncachememsize == b'67108864' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:311 nsslapd-cache-autosize == b'0' [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:312 nsslapd-cache-autosize-split == b'40' | |||
Passed | suites/config/autotuning_test.py::test_cache_autosize_invalid_values[-2] | 0.43 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:346 Set nsslapd-cache-autosize-split to -2 [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:352 Set nsslapd-cache-autosize to -2 | |||
Passed | suites/config/autotuning_test.py::test_cache_autosize_invalid_values[102] | 0.05 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:346 Set nsslapd-cache-autosize-split to 102 [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:352 Set nsslapd-cache-autosize to 102 | |||
Passed | suites/config/autotuning_test.py::test_cache_autosize_invalid_values[invalid] | 0.05 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:346 Set nsslapd-cache-autosize-split to invalid [32mINFO [0m tests.suites.config.autotuning_test:autotuning_test.py:352 Set nsslapd-cache-autosize to invalid | |||
Passed | suites/config/config_test.py::test_maxbersize_repl | 17.78 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 22d4e02d-eed7-40d7-9bd8-139f4c956787 / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 
Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 0999225e-59af-47e3-aba3-4bbe0b3e2883 / got description=22d4e02d-eed7-40d7-9bd8-139f4c956787) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.config.config_test:config_test.py:69 Set nsslapd-maxbersize: 20K to master2 [32mINFO [0m tests.suites.config.config_test:config_test.py:74 Try to add attribute with a big value to master2 - expect to FAIL [32mINFO [0m tests.suites.config.config_test:config_test.py:81 Try to add attribute with a big value to master1 - expect to PASS [32mINFO [0m tests.suites.config.config_test:config_test.py:86 Check if a big value was successfully added to master1 [32mINFO [0m tests.suites.config.config_test:config_test.py:90 Check if a big value was successfully replicated to master2 | |||
Passed | suites/config/config_test.py::test_config_listen_backport_size | 0.04 | |
No log output captured. | |||
Passed | suites/config/config_test.py::test_config_deadlock_policy | 0.24 | |
No log output captured. | |||
Passed | suites/config/config_test.py::test_defaultnamingcontext | 1.73 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.config.config_test:config_test.py:220 Check the attribute nsslapd-defaultnamingcontext is present in cn=config [32mINFO [0m tests.suites.config.config_test:config_test.py:223 Delete nsslapd-defaultnamingcontext attribute [32mINFO [0m tests.suites.config.config_test:config_test.py:230 modify nsslapd-defaultnamingcontext with new suffix [32mINFO [0m tests.suites.config.config_test:config_test.py:233 Add new invalid value at runtime to nsslapd-defaultnamingcontext [32mINFO [0m tests.suites.config.config_test:config_test.py:237 Modify nsslapd-defaultnamingcontext with blank value [32mINFO [0m tests.suites.config.config_test:config_test.py:240 Add new suffix when nsslapd-defaultnamingcontext is empty [32mINFO [0m tests.suites.config.config_test:config_test.py:244 Check the value of the nsslapd-defaultnamingcontext automatically have the new suffix [32mINFO [0m tests.suites.config.config_test:config_test.py:247 Adding new suffix when nsslapd-defaultnamingcontext is not empty [32mINFO [0m tests.suites.config.config_test:config_test.py:251 Check the value of the nsslapd-defaultnamingcontext has not changed [32mINFO [0m tests.suites.config.config_test:config_test.py:254 Remove the newly added suffix and check the values of the attribute is not changed [32mINFO [0m tests.suites.config.config_test:config_test.py:258 Remove all the suffix at the end | |||
Passed | suites/config/config_test.py::test_allow_add_delete_config_attributes | 5.11 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.config.config_test:config_test.py:308 Add a new valid attribute at runtime to cn=config [32mINFO [0m tests.suites.config.config_test:config_test.py:312 Delete nsslapd-listenhost to restore the default value [32mINFO [0m tests.suites.config.config_test:config_test.py:317 Add new invalid attribute at runtime to cn=config [32mINFO [0m tests.suites.config.config_test:config_test.py:321 Make sure the invalid attribute is not added | |||
Passed | suites/config/config_test.py::test_ignore_virtual_attrs | 0.16 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.config.config_test:config_test.py:354 Check the attribute nsslapd-ignore-virtual-attrs is present in cn=config [32mINFO [0m tests.suites.config.config_test:config_test.py:357 Check the default value of attribute nsslapd-ignore-virtual-attrs should be OFF [32mINFO [0m tests.suites.config.config_test:config_test.py:360 Set the valid values i.e. on/ON and off/OFF for nsslapd-ignore-virtual-attrs [32mINFO [0m tests.suites.config.config_test:config_test.py:365 Set invalid value for attribute nsslapd-ignore-virtual-attrs [32mINFO [0m tests.suites.config.config_test:config_test.py:376 Add cosPointer, cosTemplate and test entry to default suffix, where virtual attribute is postal code [32mINFO [0m tests.suites.config.config_test:config_test.py:389 Test if virtual attribute i.e. postal code shown in test entry while nsslapd-ignore-virtual-attrs: off [32mINFO [0m tests.suites.config.config_test:config_test.py:392 Set nsslapd-ignore-virtual-attrs=on [32mINFO [0m tests.suites.config.config_test:config_test.py:395 Test if virtual attribute i.e. postal code not shown while nsslapd-ignore-virtual-attrs: on | |||
Passed | suites/config/config_test.py::test_ndn_cache_enabled | 10.09 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.config.config_test:config_test.py:423 Check the attribute nsslapd-ndn-cache-enabled is present in cn=config [32mINFO [0m tests.suites.config.config_test:config_test.py:426 Check the attribute nsslapd-ndn-cache-enabled has the default value set as ON [32mINFO [0m tests.suites.config.config_test:config_test.py:429 Check the attribute nsslapd-ndn-cache-max-size is present in cn=config [32mINFO [0m tests.suites.config.config_test:config_test.py:435 Ticket#49593 : NDN cache stats should be under the global stats - Implemented in 1.4 [32mINFO [0m tests.suites.config.config_test:config_test.py:436 Fetch the monitor value according to the ds version [32mINFO [0m tests.suites.config.config_test:config_test.py:442 Check the backend monitor output for Normalized DN cache statistics, while nsslapd-ndn-cache-enabled is off [32mINFO [0m tests.suites.config.config_test:config_test.py:448 Check the backend monitor output for Normalized DN cache statistics, while nsslapd-ndn-cache-enabled is on [32mINFO [0m tests.suites.config.config_test:config_test.py:454 Set invalid value for nsslapd-ndn-cache-enabled [32mINFO [0m tests.suites.config.config_test:config_test.py:458 Set invalid value for nsslapd-ndn-cache-max-size | |||
Passed | suites/config/config_test.py::test_require_index | 2.70 | |
No log output captured. | |||
Passed | suites/config/config_test.py::test_require_internal_index | 7.44 | |
No log output captured. | |||
Passed | suites/config/regression_test.py::test_maxbersize_repl | 8.03 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.config.regression_test:regression_test.py:100 Set nsslapd-errorlog-maxlogsize before nsslapd-errorlog-logmaxdiskspace [32mINFO [0m tests.suites.config.regression_test:regression_test.py:104 Assert no init_dse_file errors in the error log [32mINFO [0m tests.suites.config.regression_test:regression_test.py:108 Set nsslapd-errorlog-maxlogsize after nsslapd-errorlog-logmaxdiskspace [32mINFO [0m tests.suites.config.regression_test:regression_test.py:112 Assert no init_dse_file errors in the error log | |||
Passed | suites/config/removed_config_49298_test.py::test_restore_config | 4.07 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.config.removed_config_49298_test:removed_config_49298_test.py:43 /etc/dirsrv/slapd-standalone1 | |||
Passed | suites/config/removed_config_49298_test.py::test_removed_config | 2.18 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.config.removed_config_49298_test:removed_config_49298_test.py:72 /etc/dirsrv/slapd-standalone1 | |||
Passed | suites/cos/cos_test.py::test_positive | 0.59 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/cos/indirect_cos_test.py::test_indirect_cos | 1.63 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m tests.suites.cos.indirect_cos_test:indirect_cos_test.py:113 Add custom schema... [32mINFO [0m tests.suites.cos.indirect_cos_test:indirect_cos_test.py:126 Add test user... [32mINFO [0m tests.suites.cos.indirect_cos_test:indirect_cos_test.py:143 Setup indirect COS... ------------------------------Captured stdout call------------------------------ Successfully created subtree password policy -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.cos.indirect_cos_test:indirect_cos_test.py:163 Checking user... [32mINFO [0m tests.suites.cos.indirect_cos_test:indirect_cos_test.py:60 Create password policy for subtree ou=people,dc=example,dc=com [32mINFO [0m tests.suites.cos.indirect_cos_test:indirect_cos_test.py:170 Checking user... | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_verify_operation_when_disk_monitoring_is_off | 5.02 | |
-----------------------------Captured stdout setup------------------------------ Relabeled /var/log/dirsrv/slapd-standalone1 from unconfined_u:object_r:user_tmp_t:s0 to system_u:object_r:dirsrv_var_log_t:s0 -----------------------------Captured stderr setup------------------------------ chown: cannot access '/var/log/dirsrv/slapd-standalone1/*': No such file or directory -------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. ------------------------------Captured stderr call------------------------------ 25+0 records in 25+0 records out 26214400 bytes (26 MB, 25 MiB) copied, 0.0147511 s, 1.8 GB/s dd: error writing '/var/log/dirsrv/slapd-standalone1/foo1': No space left on device 10+0 records in 9+0 records out 10465280 bytes (10 MB, 10 MiB) copied, 0.00592523 s, 1.8 GB/s | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_free_up_the_disk_space_and_change_ds_config | 4.67 | |
No log output captured. | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_verify_operation_with_nsslapd_disk_monitoring_logging_critical_off | 35.13 | |
------------------------------Captured stderr call------------------------------ 10+0 records in 10+0 records out 10485760 bytes (10 MB, 10 MiB) copied, 0.00844241 s, 1.2 GB/s | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_operation_with_nsslapd_disk_monitoring_logging_critical_on_below_half_of_the_threshold | 25.80 | |
------------------------------Captured stderr call------------------------------ 31+0 records in 31+0 records out 32505856 bytes (33 MB, 31 MiB) copied, 0.0293369 s, 1.1 GB/s | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_setting_nsslapd_disk_monitoring_logging_critical_to_off | 3.91 | |
No log output captured. | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_operation_with_nsslapd_disk_monitoring_logging_critical_off | 92.81 | |
------------------------------Captured stderr call------------------------------ 10+0 records in 10+0 records out 10485760 bytes (10 MB, 10 MiB) copied, 0.0144777 s, 724 MB/s | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_operation_with_nsslapd_disk_monitoring_logging_critical_off_below_half_of_the_threshold | 179.93 | |
------------------------------Captured stderr call------------------------------ 30+0 records in 30+0 records out 31457280 bytes (31 MB, 30 MiB) copied, 0.0507252 s, 620 MB/s | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_go_straight_below_half_of_the_threshold | 108.54 | |
------------------------------Captured stderr call------------------------------ 31+0 records in 31+0 records out 32505856 bytes (33 MB, 31 MiB) copied, 0.0543054 s, 599 MB/s | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_readonly_on_threshold | 28.96 | |
------------------------------Captured stderr call------------------------------ 10+0 records in 10+0 records out 10485760 bytes (10 MB, 10 MiB) copied, 0.0130363 s, 804 MB/s | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_readonly_on_threshold_below_half_of_the_threshold | 51.04 | |
------------------------------Captured stderr call------------------------------ 31+0 records in 31+0 records out 32505856 bytes (33 MB, 31 MiB) copied, 0.0199869 s, 1.6 GB/s | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_below_half_of_the_threshold_not_starting_after_shutdown | 113.51 | |
------------------------------Captured stderr call------------------------------ 31+0 records in 31+0 records out 32505856 bytes (33 MB, 31 MiB) copied, 0.0206149 s, 1.6 GB/s -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:disk_monitoring_test.py:582 Instance start up has failed as expected | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_go_straight_below_4kb | 19.01 | |
------------------------------Captured stderr call------------------------------ 25+0 records in 25+0 records out 26214400 bytes (26 MB, 25 MiB) copied, 0.013496 s, 1.9 GB/s dd: error writing '/var/log/dirsrv/slapd-standalone1/foo1': No space left on device 10+0 records in 9+0 records out 10174464 bytes (10 MB, 9.7 MiB) copied, 0.00639742 s, 1.6 GB/s | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_threshold_to_overflow_value | 0.06 | |
No log output captured. | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_threshold_is_reached_to_half | 14.66 | |
------------------------------Captured stderr call------------------------------ 10+0 records in 10+0 records out 10485760 bytes (10 MB, 10 MiB) copied, 0.0165072 s, 635 MB/s | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_negagtive_parameterize[nsslapd-disk-monitoring-threshold--2] | 0.01 | |
No log output captured. | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_negagtive_parameterize[nsslapd-disk-monitoring-threshold-9223372036854775808] | 0.01 | |
No log output captured. | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_negagtive_parameterize[nsslapd-disk-monitoring-threshold-2047] | 0.01 | |
No log output captured. | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_negagtive_parameterize[nsslapd-disk-monitoring-threshold-0] | 0.02 | |
No log output captured. | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_negagtive_parameterize[nsslapd-disk-monitoring-threshold--1294967296] | 0.01 | |
No log output captured. | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_negagtive_parameterize[nsslapd-disk-monitoring-threshold-invalid] | 0.00 | |
No log output captured. | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_negagtive_parameterize[nsslapd-disk-monitoring-invalid] | 0.00 | |
No log output captured. | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_negagtive_parameterize[nsslapd-disk-monitoring-1] | 0.01 | |
No log output captured. | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_negagtive_parameterize[nsslapd-disk-monitoring-grace-period-00] | 0.01 | |
No log output captured. | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_negagtive_parameterize[nsslapd-disk-monitoring-grace-period-525 948] | 0.01 | |
No log output captured. | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_negagtive_parameterize[nsslapd-disk-monitoring-grace-period--10] | 0.00 | |
No log output captured. | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_negagtive_parameterize[nsslapd-disk-monitoring-logging-critical-oninvalid] | 0.01 | |
No log output captured. | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_negagtive_parameterize[nsslapd-disk-monitoring-grace-period--11] | 0.01 | |
No log output captured. | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_negagtive_parameterize[nsslapd-disk-monitoring-grace-period-01] | 0.01 | |
No log output captured. | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_valid_operations_are_permitted | 6.36 | |
No log output captured. | |||
Passed | suites/disk_monitoring/disk_space_test.py::test_basic | 0.00 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:disk_space_test.py:37 Check that "partition", "size", "used", "available", "use%" words are present in the string [32mINFO [0m lib389:disk_space_test.py:41 Check that the sizes are numbers | |||
Passed | suites/ds_logs/ds_logs_test.py::test_check_default | 0.01 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [35mDEBUG [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:233 on | |||
Passed | suites/ds_logs/ds_logs_test.py::test_plugin_set_invalid | 0.13 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:252 test_plugin_set_invalid - Expect to fail with junk value | |||
Passed | suites/ds_logs/ds_logs_test.py::test_log_plugin_on | 5.02 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:279 Bug 1273549 - Check access logs for millisecond, when attribute is ON [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:280 perform any ldap operation, which will trigger the logs [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:36 Adding 10 users [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:284 Restart the server to flush the logs [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:287 parse the access logs | |||
Passed | suites/ds_logs/ds_logs_test.py::test_log_plugin_off | 13.24 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:319 Bug 1273549 - Check access logs for missing millisecond, when attribute is OFF [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:321 test_log_plugin_off - set the configuration attribute to OFF [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:324 Restart the server to flush the logs [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:327 test_log_plugin_off - delete the previous access logs [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:36 Adding 10 users [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:334 Restart the server to flush the logs [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:337 check access log that microseconds are not present | |||
Passed | suites/ds_logs/ds_logs_test.py::test_internal_log_server_level_0 | 4.88 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:200 Disable access log buffering -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:365 Set nsslapd-plugin-logging to on [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:368 Configure access log level to 0 [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:372 Restart the server to flush the logs [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:376 Check if access log does not contain internal log of MOD operation [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:382 Check if the other internal operations are not present | |||
Passed | suites/ds_logs/ds_logs_test.py::test_internal_log_server_level_4 | 7.66 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:200 Disable access log buffering -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:414 Set nsslapd-plugin-logging to on [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:417 Configure access log level to 4 [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:421 Restart the server to flush the logs [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:426 Check if access log contains internal MOD operation in correct format [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:432 Check if the other internal operations have the correct format | |||
Passed | suites/ds_logs/ds_logs_test.py::test_internal_log_level_260 | 7.56 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:102 Enable automember plugin [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:106 Enable Referential Integrity plugin [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:110 Set nsslapd-plugin-logging to on [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:113 Restart the server [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:120 Configure access log level to 260 [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:86 Renaming user [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:89 Delete the user [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:92 Delete automember entry, org. unit and group for the next test [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:200 Disable access log buffering -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:472 Restart the server to flush the logs [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:476 Check the access logs for ADD operation of the user [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:492 Check the access logs for MOD operation of the user [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:506 Check the access logs for DEL operation of the user [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:518 Check if the other internal operations have the correct format | |||
Passed | suites/ds_logs/ds_logs_test.py::test_internal_log_level_131076 | 8.15 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:120 Configure access log level to 131076 [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:86 Renaming user [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:89 Delete the user [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:92 Delete automember entry, org. unit and group for the next test [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:200 Disable access log buffering -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:557 Restart the server to flush the logs [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:561 Check the access logs for ADD operation of the user [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:577 Check the access logs for MOD operation of the user [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:591 Check the access logs for DEL operation of the user [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:603 Check if the other internal operations have the correct format | |||
Passed | suites/ds_logs/ds_logs_test.py::test_internal_log_level_516 | 7.94 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:120 Configure access log level to 516 [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:86 Renaming user [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:89 Delete the user [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:92 Delete automember entry, org. unit and group for the next test [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:200 Disable access log buffering -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:642 Restart the server to flush the logs [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:646 Check the access logs for ADD operation of the user [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:663 Check the access logs for MOD operation of the user [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:680 Check the access logs for DEL operation of the user [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:695 Check if the other internal operations have the correct format | |||
Passed | suites/ds_logs/ds_logs_test.py::test_access_log_truncated_search_message | 4.97 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:722 Make a search [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:725 Restart the server to flush the logs | |||
Passed | suites/ds_logs/ds_logs_test.py::test_etime_order_of_magnitude | 2.37 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:200 Disable access log buffering -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:811 add_users [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:36 Adding 30 users [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:814 search users [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:817 parse the access logs to get the SRCH string [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:825 get the operation start time from the SRCH string [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:829 get the OP number from the SRCH string [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:833 get the RESULT string matching the SRCH OP number [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:842 get the operation end time from the RESULT string [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:846 get the logged etime for the operation from the RESULT string [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:853 Calculate the ratio between logged etime for the operation and elapsed time from its start time to its end time - should be around 1 | |||
Passed | suites/ds_logs/ds_logs_test.py::test_optime_and_wtime_keywords | 2.42 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:200 Disable access log buffering -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:889 add_users [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:36 Adding 30 users [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:892 search users [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:895 parse the access logs to get the SRCH string [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:903 get the OP number from the SRCH string [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:907 get the RESULT string matching the SRCH op number [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:914 Search for the wtime keyword in the RESULT string [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:917 get the wtime value from the RESULT string [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:920 Search for the optime keyword in the RESULT string [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:923 get the optime value from the RESULT string [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:926 get the etime value from the RESULT string [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:929 Check that (wtime + optime) is approximately equal to etime i.e. their ratio is 1 | |||
Passed | suites/ds_logs/ds_logs_test.py::test_log_base_dn_when_invalid_attr_request | 4.35 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:200 Disable access log buffering -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:961 delete the previous access logs to get a fresh new one [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:964 Search the default suffix, with invalid '"" ""' attribute request [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:965 A Protocol error exception should be raised, see https://github.com/389ds/389-ds-base/issues/3028 [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:971 Check the access logs for correct messages | |||
Passed | suites/ds_logs/ds_logs_test.py::test_audit_log_rotate_and_check_string | 14.05 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:1017 Doing modifications to rotate audit log [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:1022 Doing one more modification just in case [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:1029 Check that DS string is present on first line [32mINFO [0m tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:1033 Check that DS string is present only once | |||
Passed | suites/ds_logs/regression_test.py::test_default_loglevel_stripped[24576] | 0.01 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/ds_logs/regression_test.py::test_default_loglevel_stripped[16512] | 0.04 | |
No log output captured. | |||
Passed | suites/ds_logs/regression_test.py::test_default_loglevel_stripped[16385] | 1.48 | |
No log output captured. | |||
Passed | suites/ds_logs/regression_test.py::test_dse_config_loglevel_error | 16.64 | |
No log output captured. | |||
Passed | suites/ds_tools/logpipe_test.py::test_user_permissions | 0.06 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m tests.suites.ds_tools.logpipe_test:logpipe_test.py:32 Add system test user - dirsrv_testuser -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.ds_tools.logpipe_test:logpipe_test.py:68 Try to create a logpipe in the log directory with "-u" option specifying the user | |||
Passed | suites/ds_tools/replcheck_test.py::test_state | 0.49 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect de4c1049-da47-472f-8fa8-db7aba0a341c / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 
Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 765fca1e-4467-4898-b08a-3265d6165016 / got description=de4c1049-da47-472f-8fa8-db7aba0a341c) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 is NOT working (expect eff300a2-d4af-4b63-902b-41753146a356 / got description=765fca1e-4467-4898-b08a-3265d6165016) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 is working [32mINFO [0m tests.suites.ds_tools.replcheck_test:replcheck_test.py:101 Export LDAPTLS_CACERTDIR env variable for ds-replcheck [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 is NOT working (expect ac47c3c7-b4e1-49d9-9c84-9f58fc39133c / got description=eff300a2-d4af-4b63-902b-41753146a356) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 is NOT working (expect 11bae2e2-54cc-425e-bfbb-db8b17d13291 / got description=ac47c3c7-b4e1-49d9-9c84-9f58fc39133c) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 is working | |||
Passed | suites/ds_tools/replcheck_test.py::test_check_ruv | 19.23 | |
------------------------------Captured stderr call------------------------------ ldiffile: /tmp/export_master1.ldif ldiffile: /tmp/export_master2.ldif | |||
Passed | suites/ds_tools/replcheck_test.py::test_missing_entries | 19.86 | |
------------------------------Captured stderr call------------------------------ ldiffile: /tmp/export_master1.ldif ldiffile: /tmp/export_master2.ldif | |||
Passed | suites/ds_tools/replcheck_test.py::test_tombstones | 25.00 | |
------------------------------Captured stderr call------------------------------ ldiffile: /tmp/export_master1.ldif ldiffile: /tmp/export_master2.ldif | |||
Passed | suites/ds_tools/replcheck_test.py::test_conflict_entries | 31.56 | |
------------------------------Captured stderr call------------------------------ ldiffile: /tmp/export_master1.ldif ldiffile: /tmp/export_master2.ldif | |||
Passed | suites/ds_tools/replcheck_test.py::test_inconsistencies | 27.01 | |
------------------------------Captured stderr call------------------------------ ldiffile: /tmp/export_master1.ldif ldiffile: /tmp/export_master2.ldif | |||
Passed | suites/ds_tools/replcheck_test.py::test_suffix_exists | 0.50 | |
No log output captured. | |||
Passed | suites/ds_tools/replcheck_test.py::test_check_missing_tombstones | 22.01 | |
------------------------------Captured stderr call------------------------------ ldiffile: /tmp/export_master1.ldif ldiffile: /tmp/export_master2.ldif | |||
Passed | suites/ds_tools/replcheck_test.py::test_dsreplcheck_with_password_file | 0.01 | |
No log output captured. | |||
Passed | suites/ds_tools/replcheck_test.py::test_dsreplcheck_timeout_connection_mechanisms | 2.12 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.ds_tools.replcheck_test:replcheck_test.py:535 Run ds-replcheck with -t option | |||
Passed | suites/dynamic_plugins/dynamic_plugins_test.py::test_acceptance | 43.29 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect a18382a6-80f3-4b10-9021-4fdf5e4b629f / got description=bb47b52d-69b4-4055-8b45-4b6cd9a1092e) [32mINFO [0m 
lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 519fdb9b-440b-45ca-8efc-cb7b4cfd5d94 / got description=a18382a6-80f3-4b10-9021-4fdf5e4b629f) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect f74821cd-1a9d-4dec-a18e-7c0758033b45 / got description=519fdb9b-440b-45ca-8efc-cb7b4cfd5d94) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from 
ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working | |||
Passed | suites/dynamic_plugins/dynamic_plugins_test.py::test_memory_corruption | 45.68 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 73b503a5-0841-4e76-9dd9-b316620c90c5 / got description=f74821cd-1a9d-4dec-a18e-7c0758033b45) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 1a6619cf-9309-44ac-87b4-1a7f04aabb44 / got description=73b503a5-0841-4e76-9dd9-b316620c90c5) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working | |||
Passed | suites/dynamic_plugins/dynamic_plugins_test.py::test_stress | 906.67 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 4dfdb329-6388-4296-82d0-907d1990bcbb / got description=1a6619cf-9309-44ac-87b4-1a7f04aabb44) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 4dfdb329-6388-4296-82d0-907d1990bcbb / got description=1a6619cf-9309-44ac-87b4-1a7f04aabb44) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 4dfdb329-6388-4296-82d0-907d1990bcbb / got description=1a6619cf-9309-44ac-87b4-1a7f04aabb44) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect ec7516f0-7273-4667-a5cc-11d2e095163d / got description=4dfdb329-6388-4296-82d0-907d1990bcbb) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working | |||
Passed | suites/dynamic_plugins/notice_for_restart_test.py::test_notice_when_dynamic_not_enabled | 4.90 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/export/export_test.py::test_dbtasks_db2ldif_with_non_accessible_ldif_file_path | 5.33 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. ------------------------------Captured stderr call------------------------------ ldiffile: /tmp/nonexistent/export.ldif -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.utils:export_test.py:63 Stopping the instance... [32mINFO [0m lib389.utils:export_test.py:66 Performing an offline export to a non accessible ldif file path - should fail properly [31mCRITICAL[0m LogCapture:dbtasks.py:40 db2ldif failed [32mINFO [0m lib389.utils:export_test.py:33 checking output msg [32mINFO [0m lib389.utils:export_test.py:38 Clear the log [32mINFO [0m lib389.utils:export_test.py:70 parsing the errors log to search for the error reported [32mINFO [0m lib389.utils:export_test.py:79 Restarting the instance... | |||
Passed | suites/export/export_test.py::test_db2ldif_cli_with_non_accessible_ldif_file_path | 5.89 | |
------------------------------Captured stdout call------------------------------ db2ldif failed ------------------------------Captured stderr call------------------------------ ldiffile: /tmp/nonexistent/export.ldif -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.utils:export_test.py:104 Stopping the instance... [32mINFO [0m lib389.utils:export_test.py:107 Performing an offline export to a non accessible ldif file path - should fail properly [32mINFO [0m lib389.utils:export_test.py:115 db2ldif failed properly: error (1) [32mINFO [0m lib389.utils:export_test.py:118 parsing the errors log to search for the error reported [32mINFO [0m lib389.utils:export_test.py:121 error string : '[09/Nov/2020:20:34:37.911490710 -0500] - ERR - bdb_db2ldif - db2ldif: userRoot: can\'t open /tmp/nonexistent/export.ldif: 2 (No such file or directory) while running as user "dirsrv"\n', '[09/Nov/2020:20:34:43.801709790 -0500] - ERR - bdb_db2ldif - db2ldif: userRoot: can\'t open /tmp/nonexistent/export.ldif: 2 (No such file or directory) while running as user "dirsrv"\n' [32mINFO [0m lib389.utils:export_test.py:123 Restarting the instance... | |||
Passed | suites/filter/basic_filter_test.py::test_search_attr | 0.41 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/filter/bitw_filter_test.py::test_bitwise_plugin_status | 0.00 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/filter/bitw_filter_test.py::test_search_disabled_accounts | 0.01 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_plugin_can_be_disabled | 4.07 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_plugin_is_disabled | 0.01 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_enabling_works_fine | 4.75 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (testUserAccountControl:1.2.840.113556.1.4.803:=513))-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (testUserAccountControl:1.2.840.113556.1.4.803:=16777216))-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (testUserAccountControl:1.2.840.113556.1.4.803:=8388608))-1] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (testUserAccountControl:1.2.840.113556.1.4.804:=5))-3] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (testUserAccountControl:1.2.840.113556.1.4.804:=8))-3] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (testUserAccountControl:1.2.840.113556.1.4.804:=7))-5] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testerperson) (testUserAccountControl:1.2.840.113556.1.4.804:=7))-0] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (&(testUserAccountControl:1.2.840.113556.1.4.803:=98536)(testUserAccountControl:1.2.840.113556.1.4.803:=912)))-0] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (&(testUserAccountControl:1.2.840.113556.1.4.804:=87)(testUserAccountControl:1.2.840.113556.1.4.804:=91)))-8] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (&(testUserAccountControl:1.2.840.113556.1.4.803:=89536)(testUserAccountControl:1.2.840.113556.1.4.804:=79)))-1] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (|(testUserAccountControl:1.2.840.113556.1.4.803:=89536)(testUserAccountControl:1.2.840.113556.1.4.804:=79)))-8] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (|(testUserAccountControl:1.2.840.113556.1.4.803:=89)(testUserAccountControl:1.2.840.113556.1.4.803:=536)))-0] | 0.00 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (testUserAccountControl:1.2.840.113556.1.4.803:=x))-13] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (testUserAccountControl:1.2.840.113556.1.4.803:=&\*#$%))-13] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (testUserAccountControl:1.2.840.113556.1.4.803:=-65536))-0] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (testUserAccountControl:1.2.840.113556.1.4.803:=-1))-0] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (testUserAccountControl:1.2.840.113556.1.4.803:=-))-13] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (testUserAccountControl:1.2.840.113556.1.4.803:=))-13] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (testUserAccountControl:1.2.840.113556.1.4.803:=\*))-13] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (testUserAccountControl:1.2.840.113556.1.4.804:=\*))-0] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (testUserAccountControl:1.2.840.113556.1.4.803:=6552))-0] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson\))(testUserAccountControl:1.2.840.113556.1.4.804:=6552))-0] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (testUserAccountControl:1.2.840.113556.1.4.803:=65536))-5] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_5_entries | 0.31 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_5_entries1 | 0.26 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_5_entries3 | 0.04 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_5_entries4 | 0.02 | |
No log output captured. | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(&(uid=uid1)(sn=last1)(givenname=first1))-1] | 0.01 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(uid=uid1)(sn=last1)(givenname=first1))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(&(uid=uid1)(&(sn=last1)(givenname=first1)))-1] | 0.00 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(uid=uid1)(&(sn=last1)(givenname=first1)))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(&(uid=uid1)(&(&(sn=last1))(&(givenname=first1))))-1] | 0.01 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(uid=uid1)(&(&(sn=last1))(&(givenname=first1))))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(&(uid=*)(sn=last3)(givenname=*))-1] | 0.00 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(uid=*)(sn=last3)(givenname=*))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(&(uid=*)(&(sn=last3)(givenname=*)))-1] | 0.01 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(uid=*)(&(sn=last3)(givenname=*)))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(&(uid=uid5)(&(&(sn=*))(&(givenname=*))))-1] | 0.01 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(uid=uid5)(&(&(sn=*))(&(givenname=*))))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(&(objectclass=*)(uid=*)(sn=last*))-5] | 0.01 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(objectclass=*)(uid=*)(sn=last*))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(&(objectclass=*)(uid=*)(sn=last1))-1] | 0.01 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(objectclass=*)(uid=*)(sn=last1))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(|(uid=uid1)(sn=last1)(givenname=first1))-1] | 0.01 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(|(uid=uid1)(sn=last1)(givenname=first1))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(|(uid=uid1)(|(sn=last1)(givenname=first1)))-1] | 0.01 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(|(uid=uid1)(|(sn=last1)(givenname=first1)))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(|(uid=uid1)(|(|(sn=last1))(|(givenname=first1))))-1] | 0.01 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(|(uid=uid1)(|(|(sn=last1))(|(givenname=first1))))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(|(objectclass=*)(sn=last1)(|(givenname=first1)))-18] | 0.01 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(|(objectclass=*)(sn=last1)(|(givenname=first1)))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(|(&(objectclass=*)(sn=last1))(|(givenname=first1)))-1] | 0.00 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(|(&(objectclass=*)(sn=last1))(|(givenname=first1)))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(|(&(objectclass=*)(sn=last))(|(givenname=first1)))-1] | 0.00 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(|(&(objectclass=*)(sn=last))(|(givenname=first1)))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(&(uid=uid1)(!(cn=NULL)))-1] | 0.01 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(uid=uid1)(!(cn=NULL)))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(&(!(cn=NULL))(uid=uid1))-1] | 0.01 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(!(cn=NULL))(uid=uid1))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(&(uid=*)(&(!(uid=1))(!(givenname=first1))))-5] | 0.01 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(uid=*)(&(!(uid=1))(!(givenname=first1))))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(&(|(uid=uid1)(uid=NULL))(sn=last1))-1] | 0.00 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(|(uid=uid1)(uid=NULL))(sn=last1))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(&(|(uid=uid1)(uid=NULL))(!(sn=NULL)))-1] | 0.01 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(|(uid=uid1)(uid=NULL))(!(sn=NULL)))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(&(|(uid=uid1)(sn=last2))(givenname=first1))-1] | 0.01 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(|(uid=uid1)(sn=last2))(givenname=first1))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(|(&(uid=uid1)(!(uid=NULL)))(sn=last2))-2] | 0.01 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(|(&(uid=uid1)(!(uid=NULL)))(sn=last2))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(|(&(uid=uid1)(uid=NULL))(sn=last2))-1] | 0.01 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(|(&(uid=uid1)(uid=NULL))(sn=last2))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(&(uid=uid5)(sn=*)(cn=*)(givenname=*)(uid=u*)(sn=la*)(cn=full*)(givenname=f*)(uid>=u)(!(givenname=NULL)))-1] | 0.01 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(uid=uid5)(sn=*)(cn=*)(givenname=*)(uid=u*)(sn=la*)(cn=full*)(givenname=f*)(uid>=u)(!(givenname=NULL)))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(|(&(objectclass=*)(sn=last))(&(givenname=first1)))-1] | 0.01 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(|(&(objectclass=*)(sn=last))(&(givenname=first1)))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(&(uid=uid1)(sn=last1)(givenname=NULL))-0] | 0.01 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(uid=uid1)(sn=last1)(givenname=NULL))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(&(uid=uid1)(&(sn=last1)(givenname=NULL)))-0] | 0.01 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(uid=uid1)(&(sn=last1)(givenname=NULL)))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(&(uid=uid1)(&(&(sn=last1))(&(givenname=NULL))))-0] | 0.01 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(uid=uid1)(&(&(sn=last1))(&(givenname=NULL))))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(&(uid=uid1)(&(&(sn=last1))(&(givenname=NULL)(sn=*)))(|(sn=NULL)))-0] | 0.01 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(uid=uid1)(&(&(sn=last1))(&(givenname=NULL)(sn=*)))(|(sn=NULL)))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(&(uid=uid1)(&(&(sn=last*))(&(givenname=first*)))(&(sn=NULL)))-0] | 0.01 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(uid=uid1)(&(&(sn=last*))(&(givenname=first*)))(&(sn=NULL)))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(|(uid=NULL)(sn=NULL)(givenname=NULL))-0] | 0.00 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(|(uid=NULL)(sn=NULL)(givenname=NULL))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(|(uid=NULL)(|(sn=NULL)(givenname=NULL)))-0] | 0.01 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(|(uid=NULL)(|(sn=NULL)(givenname=NULL)))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(|(uid=NULL)(|(|(sn=NULL))(|(givenname=NULL))))-0] | 0.00 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(|(uid=NULL)(|(|(sn=NULL))(|(givenname=NULL))))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(|(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*))-6] | 0.01 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(|(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(uid>=uid3)-3] | 0.01 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(uid>=uid3)"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(&(uid=*)(uid>=uid3))-3] | 0.01 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(uid=*)(uid>=uid3))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(|(uid>=uid3)(uid<=uid5))-6] | 0.01 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(|(uid>=uid3)(uid<=uid5))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(&(uid>=uid3)(uid<=uid5))-3] | 0.00 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(uid>=uid3)(uid<=uid5))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(|(&(uid>=uid3)(uid<=uid5))(uid=*))-6] | 0.01 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(|(&(uid>=uid3)(uid<=uid5))(uid=*))"... | |||
Passed | suites/filter/filter_cert_test.py::test_positive | 27.72 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index0] | 0.02 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index1] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index2] | 0.13 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index3] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index4] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index5] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index6] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index7] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index8] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index9] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index10] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index11] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index12] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index13] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index14] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index15] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index16] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index17] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods[mod0] | 0.05 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods[mod1] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods[mod2] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods[mod3] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods[mod4] | 0.05 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods[mod5] | 0.06 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods[mod6] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods[mod7] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods[mod8] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods[mod9] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods[mod10] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods[mod11] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods[mod12] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods[mod13] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods[mod14] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods[mod15] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods[mod16] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods[mod17] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_replace[mode0] | 0.05 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_replace[mode1] | 0.05 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_replace[mode2] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_replace[mode3] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_replace[mode4] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_replace[mode5] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_replace[mode6] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_replace[mode7] | 0.15 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_replace[mode8] | 0.05 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_replace[mode9] | 0.27 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_replace[mode10] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_replace[mode11] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_replace[mode12] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_replace[mode13] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_replace[mode14] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_replace[mode15] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_replace[mode16] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_replace[mode17] | 0.27 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_delete[mode0] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_delete[mode1] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_delete[mode2] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_delete[mode3] | 0.05 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_delete[mode4] | 0.06 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_delete[mode5] | 0.05 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_delete[mode6] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_delete[mode7] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_delete[mode8] | 0.05 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_delete[mode9] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_delete[mode10] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_delete[mode11] | 0.05 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_delete[mode12] | 0.05 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_delete[mode13] | 0.29 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_delete[mode14] | 0.05 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_delete[mode15] | 0.05 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_delete[mode16] | 0.06 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_delete[mode17] | 0.05 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_search_positive_negative | 0.48 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrbitStringMatch:bitStringMatch:='0001'B)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseExactIA5Match:caseExactIA5Match:=Sprain)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseExactMatch:caseExactMatch:=ÇélIné Ändrè)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseExactMatch:caseExactOrderingMatch:=ÇélIné Ändrè)-5] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrgeneralizedTimeMatch:generalizedTimeMatch:=20100218171300Z)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrgeneralizedTimeMatch:generalizedTimeOrderingMatch:=20100218171300Z)-6] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrbooleanMatch:booleanMatch:=TRUE)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseIgnoreIA5Match:caseIgnoreIA5Match:=sprain1)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseIgnoreMatch:caseIgnoreMatch:=ÇélIné Ändrè1)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseIgnoreMatch:caseIgnoreOrderingMatch:=ÇélIné Ändrè1)-6] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseIgnoreListMatch:caseIgnoreListMatch:=foo1$bar)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrobjectIdentifierMatch:objectIdentifierMatch:=1.3.6.1.4.1.1466.115.121.1.15)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrdirectoryStringFirstComponentMatch:directoryStringFirstComponentMatch:=ÇélIné Ändrè1)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrobjectIdentifierFirstComponentMatch:objectIdentifierFirstComponentMatch:=1.3.6.1.4.1.1466.115.121.1.15)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrdistinguishedNameMatch:distinguishedNameMatch:=cn=foo1,cn=bar)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrintegerMatch:integerMatch:=-2)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrintegerMatch:integerOrderingMatch:=-2)-6] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrintegerFirstComponentMatch:integerFirstComponentMatch:=-2)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attruniqueMemberMatch:uniqueMemberMatch:=cn=foo1,cn=bar#'0001'B)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrnumericStringMatch:numericStringMatch:=00001)-10] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrnumericStringMatch:numericStringMatch:=00001)-11] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrtelephoneNumberMatch:telephoneNumberMatch:=+1 408 555 4798)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attroctetStringMatch:octetStringMatch:=AAAAAAAAAAAAAAE=)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attroctetStringMatch:octetStringOrderingMatch:=AAAAAAAAAAAAAAE=)-6] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseExactMatch=*ÇélIné Ändrè*)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseExactMatch=ÇélIné Ändrè*)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseExactMatch=*ÇélIné Ändrè)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseExactMatch=*é Ä*)-5] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseExactIA5Match=*Sprain*)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseExactIA5Match=Sprain*)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseExactIA5Match=*Sprain)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseExactIA5Match=*rai*)-3] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseIgnoreIA5Match=*sprain1*)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseIgnoreIA5Match=sprain1*)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseIgnoreIA5Match=*sprain1)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseIgnoreIA5Match=*rai*)-6] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseIgnoreMatch=*ÇélIné Ändrè1*)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseIgnoreMatch=ÇélIné Ändrè1*)-1] | 0.12 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseIgnoreMatch=*ÇélIné Ändrè1)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseIgnoreMatch=*é Ä*)-6] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseIgnoreListMatch=*foo1$bar*)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseIgnoreListMatch=foo1$bar*)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseIgnoreListMatch=*foo1$bar)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseIgnoreListMatch=*1$b*)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrnumericStringMatch=*00001*)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrnumericStringMatch=00001*)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrnumericStringMatch=*00001)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrnumericStringMatch=*000*)-6] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrtelephoneNumberMatch=*+1 408 555 4798*)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrtelephoneNumberMatch=+1 408 555 4798*)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrtelephoneNumberMatch=*+1 408 555 4798)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrtelephoneNumberMatch=* 55*)-6] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_positive[(|(|(ou=nothing1)(ou=people))(|(ou=nothing2)(ou=nothing3)))] | 0.01 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/filter/filter_indexing_test.py::test_positive[(|(|(ou=people)(ou=nothing1))(|(ou=nothing2)(ou=nothing3)))] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_positive[(|(|(ou=nothing1)(ou=nothing2))(|(ou=people)(ou=nothing3)))] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_positive[(|(|(ou=nothing1)(ou=nothing2))(|(ou=nothing3)(ou=people)))] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_positive[(&(sn<=0000000000000000)(givenname>=FFFFFFFFFFFFFFFF))] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_positive[(&(sn>=0000000000000000)(sn<=1111111111111111))] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_positive[(&(sn>=0000000000000000)(givenname<=FFFFFFFFFFFFFFFF))] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_indexing_schema | 3.98 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_indexing[(uidNumber=18446744073709551617)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_indexing[(gidNumber=18446744073709551617)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_indexing[(MYINTATTR=18446744073709551617)] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_indexing[(&(uidNumber=*)(!(uidNumber=18446744073709551617)))] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_indexing[(&(gidNumber=*)(!(gidNumber=18446744073709551617)))] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_indexing[(&(uidNumber=*)(!(gidNumber=18446744073709551617)))] | 0.12 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_indexing[(&(myintattr=*)(!(myintattr=18446744073709551617)))] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_indexing[(uidNumber>=-18446744073709551617)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_indexing[(gidNumber>=-18446744073709551617)] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_indexing[(uidNumber<=18446744073709551617)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_indexing[(gidNumber<=18446744073709551617)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_indexing[(myintattr<=18446744073709551617)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_indexing_negative[(gidNumber=54321)] | 0.00 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_indexing_negative[(uidNumber=54321)] | 0.00 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_indexing_negative[(myintattr=54321)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_indexing_negative[(gidNumber<=-999999999999999999999999999999)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_indexing_negative[(uidNumber<=-999999999999999999999999999999)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_indexing_negative[(myintattr<=-999999999999999999999999999999)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_indexing_negative[(gidNumber>=999999999999999999999999999999)] | 0.00 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_indexing_negative[(uidNumber>=999999999999999999999999999999)] | 0.00 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_indexing_negative[(myintattr>=999999999999999999999999999999)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_logic_test.py::test_eq | 0.01 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/filter/filter_logic_test.py::test_sub | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_logic_test.py::test_not_eq | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_logic_test.py::test_ranges | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_logic_test.py::test_and_eq | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_logic_test.py::test_range | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_logic_test.py::test_and_allid_shortcut | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_logic_test.py::test_or_eq | 0.03 | |
No log output captured. | |||
Passed | suites/filter/filter_logic_test.py::test_and_not_eq | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_logic_test.py::test_or_not_eq | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_logic_test.py::test_and_range | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_logic_test.py::test_or_range | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_logic_test.py::test_and_and_eq | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_logic_test.py::test_or_or_eq | 0.03 | |
No log output captured. | |||
Passed | suites/filter/filter_logic_test.py::test_and_or_eq | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_logic_test.py::test_or_and_eq | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_matching_rules | 0.12 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/filter/filter_match_test.py::test_add_attribute_types | 1.41 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule0] | 0.03 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule1] | 0.03 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule2] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule3] | 0.05 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule4] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule5] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule6] | 0.03 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule7] | 0.03 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule8] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule9] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule10] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule11] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule12] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule13] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule14] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule15] | 0.03 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule16] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule17] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_modes[mode0] | 0.29 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_modes[mode1] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_modes[mode2] | 0.05 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_modes[mode3] | 0.27 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_modes[mode4] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_modes[mode5] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_modes[mode6] | 0.27 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_modes[mode7] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_modes[mode8] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_modes[mode9] | 0.06 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_modes[mode10] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_modes[mode11] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_modes[mode12] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_modes[mode13] | 0.05 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_modes[mode14] | 0.05 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_modes[mode15] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_modes[mode16] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_modes[mode17] | 0.05 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode0] | 0.07 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode1] | 0.06 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode2] | 0.07 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode3] | 0.31 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode4] | 0.06 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode5] | 0.07 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode6] | 0.06 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode7] | 0.07 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode8] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode9] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode10] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode11] | 0.07 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode12] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode13] | 0.30 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode14] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode15] | 0.07 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode16] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode17] | 0.12 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_match_count[(attrbitStringMatch='0001'B)-1-(attrbitStringMatch:bitStringMatch:='000100000'B)] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_match_count[(attrgeneralizedTimeMatch=20100218171300Z)-1-(attrcaseExactIA5Match=SPRAIN)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_match_count[(attrcaseExactMatch>=ÇélIné Ändrè)-5-(attrcaseExactMatch=ÇéLINé ÄNDRè)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_match_count[(attrcaseExactMatch:caseExactMatch:=ÇélIné Ändrè)-1-(attrcaseExactMatch>=çéliné ändrè)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_match_count[(attrcaseExactIA5Match=Sprain)-1-(attrgeneralizedTimeMatch=20300218171300Z)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_match_count[(attrbooleanMatch=TRUE)-1-(attrgeneralizedTimeMatch>=20300218171300Z)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_match_count[(attrcaseIgnoreIA5Match=sprain1)-1-(attrcaseIgnoreIA5Match=sprain9999)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_match_count[(attrcaseIgnoreMatch=ÇélIné Ändrè1)-1-(attrcaseIgnoreMatch=ÇélIné Ändrè9999)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_match_count[(attrcaseIgnoreMatch>=ÇélIné Ändrè1)-6-(attrcaseIgnoreMatch>=ÇélIné Ändrè9999)] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_match_count[(attrcaseIgnoreListMatch=foo1$bar)-1-(attrcaseIgnoreListMatch=foo1$bar$baz$biff)] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_match_count[(attrobjectIdentifierMatch=1.3.6.1.4.1.1466.115.121.1.15)-1-(attrobjectIdentifierMatch=1.3.6.1.4.1.1466.115.121.1.15.99999)] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_match_count[(attrgeneralizedTimeMatch>=20100218171300Z)-6-(attroctetStringMatch>=AAAAAAAAAAABAQQ=)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_match_count[(attrdirectoryStringFirstComponentMatch=ÇélIné Ändrè1)-1-(attrdirectoryStringFirstComponentMatch=ÇélIné Ändrè9999)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_match_count[(attrobjectIdentifierFirstComponentMatch=1.3.6.1.4.1.1466.115.121.1.15)-1-(attrobjectIdentifierFirstComponentMatch=1.3.6.1.4.1.1466.115.121.1.15.99999)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_match_count[(attrdistinguishedNameMatch=cn=foo1,cn=bar)-1-(attrdistinguishedNameMatch=cn=foo1,cn=bar,cn=baz)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_match_count[(attrintegerMatch=-2)-1-(attrintegerMatch=-20)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_match_count[(attrintegerMatch>=-2)-6-(attrintegerMatch>=20)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_match_count[(attrintegerFirstComponentMatch=-2)-1-(attrintegerFirstComponentMatch=-20)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_match_count[(attruniqueMemberMatch=cn=foo1,cn=bar#'0001'B)-1-(attruniqueMemberMatch=cn=foo1,cn=bar#'00010000'B)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_match_count[(attrnumericStringMatch=00001)-1-(attrnumericStringMatch=000000001)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_match_count[(attrnumericStringMatch>=00001)-6-(attrnumericStringMatch>=01)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_match_count[(attrtelephoneNumberMatch=+1 408 555 4798)-1-(attrtelephoneNumberMatch=+2 408 555 4798)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_match_count[(attroctetStringMatch=AAAAAAAAAAAAAAE=)-1-(attroctetStringMatch=AAAAAAAAAAAAAAEB)] | 0.03 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_match_count[(attroctetStringMatch>=AAAAAAAAAAAAAAE=)-6-(attroctetStringMatch>=AAAAAAAAAAABAQE=)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrbitStringMatch:bitStringMatch:='0001'B)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseExactIA5Match:caseExactIA5Match:=Sprain)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseExactMatch:caseExactMatch:=ÇélIné Ändrè)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseExactMatch:caseExactOrderingMatch:=ÇélIné Ändrè)-5] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrgeneralizedTimeMatch:generalizedTimeMatch:=20100218171300Z)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrgeneralizedTimeMatch:generalizedTimeOrderingMatch:=20100218171300Z)-6] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrbooleanMatch:booleanMatch:=TRUE)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseIgnoreIA5Match:caseIgnoreIA5Match:=sprain1)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseIgnoreMatch:caseIgnoreMatch:=ÇélIné Ändrè1)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseIgnoreMatch:caseIgnoreOrderingMatch:=ÇélIné Ändrè1)-6] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseIgnoreListMatch:caseIgnoreListMatch:=foo1$bar)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrobjectIdentifierMatch:objectIdentifierMatch:=1.3.6.1.4.1.1466.115.121.1.15)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrdirectoryStringFirstComponentMatch:directoryStringFirstComponentMatch:=ÇélIné Ändrè1)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrobjectIdentifierFirstComponentMatch:objectIdentifierFirstComponentMatch:=1.3.6.1.4.1.1466.115.121.1.15)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrdistinguishedNameMatch:distinguishedNameMatch:=cn=foo1,cn=bar)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrintegerMatch:integerMatch:=-2)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrintegerMatch:integerOrderingMatch:=-2)-6] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrintegerFirstComponentMatch:integerFirstComponentMatch:=-2)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attruniqueMemberMatch:uniqueMemberMatch:=cn=foo1,cn=bar#'0001'B)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrnumericStringMatch:numericStringMatch:=00001)-10] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrnumericStringMatch:numericStringMatch:=00001)-11] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrtelephoneNumberMatch:telephoneNumberMatch:=+1 408 555 4798)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attroctetStringMatch:octetStringMatch:=AAAAAAAAAAAAAAE=)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attroctetStringMatch:octetStringOrderingMatch:=AAAAAAAAAAAAAAE=)-6] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseExactMatch=*ÇélIné Ändrè*)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseExactMatch=ÇélIné Ändrè*)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseExactMatch=*ÇélIné Ändrè)-1] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseExactMatch=*é Ä*)-5] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseExactIA5Match=*Sprain*)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseExactIA5Match=Sprain*)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseExactIA5Match=*Sprain)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseExactIA5Match=*rai*)-3] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseIgnoreIA5Match=*sprain1*)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseIgnoreIA5Match=sprain1*)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseIgnoreIA5Match=*sprain1)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseIgnoreIA5Match=*rai*)-6] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseIgnoreMatch=*ÇélIné Ändrè1*)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseIgnoreMatch=ÇélIné Ändrè1*)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseIgnoreMatch=*ÇélIné Ändrè1)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseIgnoreMatch=*é Ä*)-6] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseIgnoreListMatch=*foo1$bar*)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseIgnoreListMatch=foo1$bar*)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseIgnoreListMatch=*foo1$bar)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseIgnoreListMatch=*1$b*)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrnumericStringMatch=*00001*)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrnumericStringMatch=00001*)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrnumericStringMatch=*00001)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrnumericStringMatch=*000*)-6] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrtelephoneNumberMatch=*+1 408 555 4798*)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrtelephoneNumberMatch=+1 408 555 4798*)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrtelephoneNumberMatch=*+1 408 555 4798)-1] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrtelephoneNumberMatch=* 55*)-6] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/filter_test.py::test_filter_escaped | 0.32 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.filter.filter_test:filter_test.py:42 Running test_filter_escaped... [32mINFO [0m tests.suites.filter.filter_test:filter_test.py:78 test_filter_escaped: PASSED | |||
Passed | suites/filter/filter_test.py::test_filter_search_original_attrs | 0.01 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.filter.filter_test:filter_test.py:95 Running test_filter_search_original_attrs... [32mINFO [0m tests.suites.filter.filter_test:filter_test.py:108 test_filter_search_original_attrs: PASSED | |||
Passed | suites/filter/filter_test.py::test_filter_scope_one | 0.01 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.filter.filter_test:filter_test.py:125 Search user using ldapsearch with scope one [32mINFO [0m tests.suites.filter.filter_test:filter_test.py:127 [dn: ou=services,dc=example,dc=com ou: services ] [32mINFO [0m tests.suites.filter.filter_test:filter_test.py:129 Search should only have one entry | |||
Passed | suites/filter/filter_test.py::test_filter_with_attribute_subtype | 0.35 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:filter_test.py:154 Bind as cn=Directory Manager [32mINFO [0m lib389:filter_test.py:161 ######################### ADD ###################### [32mINFO [0m lib389:filter_test.py:184 Try to add Add cn=test_entry both, dc=example,dc=com: dn: cn=test_entry both, dc=example,dc=com cn: test_entry both cn;en: test_entry en cn;fr: test_entry fr objectclass: top objectclass: person sn: test_entry both [32mINFO [0m lib389:filter_test.py:187 Try to add Add cn=test_entry en only, dc=example,dc=com: dn: cn=test_entry en only, dc=example,dc=com cn: test_entry en only cn;en: test_entry en objectclass: top objectclass: person sn: test_entry en only [32mINFO [0m lib389:filter_test.py:190 ######################### SEARCH ###################### [32mINFO [0m lib389:filter_test.py:194 Try to search with filter (&(sn=test_entry en only)(!(cn=test_entry fr))) [32mINFO [0m lib389:filter_test.py:198 Found cn=test_entry en only,dc=example,dc=com [32mINFO [0m lib389:filter_test.py:202 Try to search with filter (&(sn=test_entry en only)(!(cn;fr=test_entry fr))) [32mINFO [0m lib389:filter_test.py:206 Found cn=test_entry en only,dc=example,dc=com [32mINFO [0m lib389:filter_test.py:210 Try to search with filter (&(sn=test_entry en only)(!(cn;en=test_entry en))) [32mINFO [0m lib389:filter_test.py:213 Found none [32mINFO [0m lib389:filter_test.py:215 ######################### DELETE ###################### [32mINFO [0m lib389:filter_test.py:217 Try to delete cn=test_entry both, dc=example,dc=com [32mINFO [0m lib389:filter_test.py:220 Try to delete cn=test_entry en only, dc=example,dc=com [32mINFO [0m tests.suites.filter.filter_test:filter_test.py:223 Testcase PASSED | |||
Passed | suites/filter/filter_test.py::test_extended_search | 0.05 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.filter.filter_test:filter_test.py:250 Running test_filter_escaped... [32mINFO [0m lib389:filter_test.py:267 Try to search with filter (cn:de:=ext-test-entry) [32mINFO [0m lib389:filter_test.py:273 Try to search with filter (cn:caseIgnoreIA5Match:=EXT-TEST-ENTRY) [32mINFO [0m lib389:filter_test.py:279 Try to search with filter (cn:caseIgnoreMatch:=EXT-TEST-ENTRY) [32mINFO [0m lib389:filter_test.py:285 Try to search with filter (cn:caseExactMatch:=EXT-TEST-ENTRY) [32mINFO [0m lib389:filter_test.py:291 Try to search with filter (cn:caseExactMatch:=ext-test-entry) [32mINFO [0m lib389:filter_test.py:297 Try to search with filter (cn:caseExactIA5Match:=EXT-TEST-ENTRY) [32mINFO [0m lib389:filter_test.py:303 Try to search with filter (cn:caseExactIA5Match:=ext-test-entry) | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_telephone[(telephonenumber=*7393)] | 0.07 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_telephone[(telephonenumber=*408*3)] | 0.05 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid=mward)] | 0.06 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(l=sunnyvale)0] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(mail=jreu*)] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(mail=*exam*)] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid=*)] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:2.16.840.1.113730.3.3.2.15.1:=>AAA)] | 0.05 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:es:=>AAA)] | 0.05 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:2.16.840.1.113730.3.3.2.15.1.5:=AAA)] | 0.05 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:2.16.840.1.113730.3.3.2.15.1:=>user100)] | 0.06 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:es:=>user100)] | 0.07 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:2.16.840.1.113730.3.3.2.15.1.5:=user100)] | 0.06 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:2.16.840.1.113730.3.3.2.15.1.1:=user1)] | 0.03 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:2.16.840.1.113730.3.3.2.15.1.1:=z)] | 0.05 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid=user1)] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid<=Z)] | 0.06 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid>=1)] | 0.06 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid>=A)] | 0.05 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid>=user20)] | 0.05 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:2.16.840.1.113730.3.3.2.18.1.2:=user20)] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:2.16.840.1.113730.3.3.2.18.1.2:=z)] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:2.16.840.1.113730.3.3.2.18.1:=>=A)] | 0.07 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:fr:=>=A)] | 0.05 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:2.16.840.1.113730.3.3.2.18.1.4:=A)] | 0.05 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:2.16.840.1.113730.3.3.2.18.1:=>=user20)] | 0.06 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:fr:=>=user20)] | 0.05 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:2.16.840.1.113730.3.3.2.18.1.4:=user20)] | 0.05 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:2.16.840.1.113730.3.3.2.18.1:=>=z)] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:fr:=>=z)] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(description=This is the special * attribute value)] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(description=*x*)] | 0.03 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid=ptyler)] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid=*wal*)] | 0.03 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(roomNumber=0312)] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid=mw*)] | 0.03 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(roomNumber=2295)] | 0.03 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(l=Cupertino)] | 0.03 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(objectclass=inetorgperson)] | 0.05 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(l=sunnyvale)1] | 0.29 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(roomNumber=200)] | 0.03 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(roomNumber=201)] | 0.03 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(roomNumber=202)] | 0.03 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(l=*)] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(!(sn~=tiller))(!(uid=ptyler)))] | 0.40 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(!(sn~=tiller)) (uid=ptyler))] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(uid=*wal*) (roomNumber=0312))] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(!(uid=*wal*))(!(roomNumber=0312)))] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(!(uid=*wal*))(roomNumber=0312))] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(uid=*wal*)(!(roomNumber=0312)))] | 0.05 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(uid=*wal*)(|(sn~=tiller) (roomNumber=2295)))] | 0.13 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(&(uid=*wal*) (roomNumber=2295))(&(uid=*wal*) (sn~=tiller)))] | 0.03 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(uid=*wal*)(&(sn~=tiller) (roomNumber=2295)))0] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(|(uid=*wal*) (sn~=tiller))(|(uid=*wal*) (roomNumber=2295)))0] | 0.03 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(uid=*wal*) (roomNumber=2295))0] | 0.03 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(roomNumber=2295) (uid=*wal*))0] | 0.03 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(roomNumber=2295) (uid=*wal*))0] | 0.03 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(uid=*wal*)(&(sn~=tiller) (roomNumber=2295)))1] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(uid=*wal*) (roomNumber=2295))] | 0.05 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(uid=*wal*) (l=*))] | 0.05 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(l=Cupertino) (|(uid=*wal*)(&(sn~=tiller) (roomNumber=2295))))] | 0.03 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(!(l=Cupertino))(!(|(uid=*wal*)(&(sn~=tiller) (roomNumber=2295)))))] | 0.06 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(!(l=Cupertino))(|(uid=*wal*)(&(sn~=tiller) (roomNumber=2295))))] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(l=Cupertino)(!(|(uid=*wal*)(&(sn~=tiller) (roomNumber=2295)))))] | 0.03 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(l=Cupertino) (|(uid=*wal*)(&(sn~=tiller) (roomNumber=2295))))] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(!(l=Cupertino))(!(|(uid=*wal*)(&(sn~=tiller) (roomNumber=2295)))))] | 0.29 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(!(l=Cupertino))(|(uid=*wal*)(&(sn~=tiller) (roomNumber=2295))))] | 0.05 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(l=Cupertino)(!(|(uid=*wal*)(&(sn~=tiller) (roomNumber=2295)))))] | 0.06 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(!(uid=user1))(objectclass=inetorgperson))] | 0.06 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(!(uid=user1))(objectclass=inetorgperson))] | 0.05 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(!(mail=cnewport@example.com))(l=sunnyvale))] | 0.06 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(|(uid=*wal*) (sn~=tiller))(|(uid=*wal*) (roomNumber=2295)))1] | 0.06 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(uid=*wal*) (roomNumber=2295))1] | 0.03 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(roomNumber=2295) (uid=*wal*))1] | 0.03 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(roomNumber=2295) (uid=*wal*))1] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(!(roomNumber=200))(!(roomNumber=201))(!(roomNumber=202))(l=sunnyvale))] | 0.05 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(!(uid=user40))(&(!(uid=user1))(!(uid=user20))(!(uid=user30))(objectclass=inetorgperson)))] | 0.05 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(!(uid=user40))(&(!(uid=user1))(!(uid=user20))(!(uid=user30))(objectclass=inetorgperson)))] | 0.05 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(!(roomNumber=2254))(&(!(roomNumber=200))(!(roomNumber=201))(!(roomNumber=202))(l=sunnyvale)))] | 0.05 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(!(roomNumber=2254))(&(!(roomNumber=200))(!(roomNumber=201))(!(roomNumber=202))(l=sunnyvale)))] | 0.31 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(!(uid=user1))(!(uid:2.16.840.1.113730.3.3.2.18.1:=<=user20))(!(uid=user30)))] | 0.06 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(!(uid=user1))(!(uid:2.16.840.1.113730.3.3.2.18.1:=<=user20))(!(uid=user30)))] | 0.14 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(!(roomNumber=4012))(!(roomNumber=3924))(!(roomNumber=4508)))] | 0.31 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(!(roomNumber=4012))(!(roomNumber=3924))(!(roomNumber=4508)))] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(& (objectclass=inetorgperson)(!(uid=user1))(!(uid:2.16.840.1.113730.3.3.2.18.1:=<=user20))(!(uid=user30)))] | 0.05 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(| (objectclass=inetorgperson)(!(uid=user1))(!(uid:2.16.840.1.113730.3.3.2.18.1:=<=user20))(!(uid=user30)))] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(l=sunnyvale)(!(roomNumber=4012))(!(roomNumber=3924))(!(roomNumber=4508)))] | 0.05 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(l=sunnyvale)(!(roomNumber=4012))(!(roomNumber=3924))(!(roomNumber=4508)))] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(!(|(!(l=*))(!(l=sunnyvale))))] | 0.05 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(!(l=*))(!(l=sunnyvale)))] | 0.06 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(!(l=Cupertino))(!(mail=*exam*))(!(|(uid=*wal*) (l=*))))] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(!(l=Cupertino))(!(mail=*exam*))(|(uid=*wal*) (l=*)))] | 0.06 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(!(l=Cupertino))(mail=*exam*) (|(uid=*wal*) (l=*)))] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(l=Cupertino) (mail=*exam*) (|(uid=*wal*) (l=*)))] | 0.06 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(!(l=Cupertino))(!(mail=*exam*))(!(|(uid=*wal*) (l=*))))] | 0.05 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(!(l=Cupertino))(!(mail=*exam*))(|(uid=*wal*) (l=*)))] | 0.06 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(!(l=Cupertino))(mail=*exam*)(!(|(uid=*wal*) (l=*))))] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(!(l=Cupertino))(mail=*exam*) (|(uid=*wal*) (l=*)))] | 0.06 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(l=Cupertino)(!(mail=*exam*))(!(|(uid=*wal*) (l=*))))] | 0.06 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(l=Cupertino)(!(mail=*exam*))(|(uid=*wal*) (l=*)))] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(l=Cupertino) (mail=*exam*)(!(|(uid=*wal*) (l=*))))] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(l=Cupertino)(!(mail=*exam*))(|(uid=*wal*) (l=*)))] | 0.03 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(userpassword=*)] | 0.03 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(fred=*)] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid:2.16.840.1.113730.3.3.2.15.1:=<1)] | 0.03 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid:es:=<1)] | 0.03 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid:2.16.840.1.113730.3.3.2.15.1.1:=1)] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid:2.16.840.1.113730.3.3.2.15.1:=<user1)] | 0.03 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid:es:=<user1)] | 0.03 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid:2.16.840.1.113730.3.3.2.15.1:=<z)] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid:es:=<z)] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid<=1)] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid<=A)] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid>=Z)] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid:2.16.840.1.113730.3.3.2.18.1:=<=A)] | 0.03 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid:fr:=<=A)] | 0.03 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid:2.16.840.1.113730.3.3.2.18.1.2:=A)] | 0.03 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid:2.16.840.1.113730.3.3.2.18.1:=<=user20)] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid:fr:=<=user20)] | 0.03 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid:2.16.840.1.113730.3.3.2.18.1:=<=z)] | 0.03 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid:fr:=<=z)] | 0.03 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid:2.16.840.1.113730.3.3.2.18.1.4:=z)] | 0.04 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(sn~=tiller)] | 0.03 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(givenName~=pricella)] | 0.03 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(mail=cnewport@example.com)] | 0.03 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid=user20)] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid=user30)] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid=user40)] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(&(sn~=tiller) (givenName~=pricella))] | 0.03 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(&(sn~=tiller)(!(uid=ptyler)))] | 0.03 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(&(!(l=Cupertino))(mail=*exam*)(!(|(uid=*wal*) (l=*))))] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(&(l=Cupertino)(!(mail=*exam*))(!(|(uid=*wal*) (l=*))))] | 0.28 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(&(l=Cupertino) (mail=*exam*)(!(|(uid=*wal*) (l=*))))] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(&(l=Cupertino) (mail=*exam*) (|(uid=*wal*) (l=*)))] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/filterscanlimit_test.py::test_invalid_configuration | 1.10 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/filter/filterscanlimit_test.py::test_idlistscanlimit | 12.22 | |
No log output captured. | |||
Passed | suites/filter/large_filter_test.py::test_large_filter[(&(objectClass=person)(|(manager=uid=fmcdonnagh,dc=anuj,dc=com)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_0,dc=anuj,dc=com)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_1,dc=anuj,dc=com)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_2,dc=anuj,dc=com)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_3,dc=anuj,dc=com)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_4,dc=anuj,dc=com)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_5,dc=anuj,dc=com)(manager=uid=jvedder, dc=anuj, dc=com)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_6,dc=anuj,dc=com)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_7,dc=anuj,dc=com)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_8,dc=anuj,dc=com)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_9,dc=anuj,dc=com)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_10,dc=anuj,dc=com)(manager=uid=cnewport, dc=anuj, dc=com)))] | 0.06 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/filter/large_filter_test.py::test_large_filter[(&(objectClass=person)(|(manager=uid=fmcdonnagh *)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_0,*)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_1,*)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_2,*)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_3,*)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_4,*)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_5,*)(manager=uid=jvedder,*)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_6,*)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_7,*)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_8,*)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_9,*)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_10,*)(manager=uid=cnewport,*)))] | 0.03 | |
No log output captured. | |||
Passed | suites/filter/rfc3673_all_oper_attrs_test.py::test_supported_features | 0.01 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[-False-oper_attr_list0-] | 0.04 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.utils:rfc3673_all_oper_attrs_test.py:161 bound as: cn=Directory Manager | |||
Passed | suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[-False-oper_attr_list0-*] | 0.01 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.utils:rfc3673_all_oper_attrs_test.py:161 bound as: cn=Directory Manager | |||
Passed | suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[-False-oper_attr_list0-objectClass] | 0.01 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.utils:rfc3673_all_oper_attrs_test.py:161 bound as: cn=Directory Manager | |||
Passed | suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[-True-oper_attr_list1-] | 0.04 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.utils:rfc3673_all_oper_attrs_test.py:158 bound as: uid=all_attrs_test,ou=people,dc=example,dc=com | |||
Passed | suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[-True-oper_attr_list1-*] | 0.01 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.utils:rfc3673_all_oper_attrs_test.py:158 bound as: uid=all_attrs_test,ou=people,dc=example,dc=com | |||
Passed | suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[-True-oper_attr_list1-objectClass] | 0.02 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.utils:rfc3673_all_oper_attrs_test.py:158 bound as: uid=all_attrs_test,ou=people,dc=example,dc=com | |||
Passed | suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[ou=people,dc=example,dc=com-False-oper_attr_list2-] | 0.04 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.utils:rfc3673_all_oper_attrs_test.py:161 bound as: cn=Directory Manager | |||
Passed | suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[ou=people,dc=example,dc=com-False-oper_attr_list2-*] | 0.01 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.utils:rfc3673_all_oper_attrs_test.py:161 bound as: cn=Directory Manager | |||
Passed | suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[ou=people,dc=example,dc=com-False-oper_attr_list2-objectClass] | 0.01 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.utils:rfc3673_all_oper_attrs_test.py:161 bound as: cn=Directory Manager | |||
Passed | suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[ou=people,dc=example,dc=com-True-oper_attr_list3-] | 0.05 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.utils:rfc3673_all_oper_attrs_test.py:158 bound as: uid=all_attrs_test,ou=people,dc=example,dc=com | |||
Passed | suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[ou=people,dc=example,dc=com-True-oper_attr_list3-*] | 0.01 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.utils:rfc3673_all_oper_attrs_test.py:158 bound as: uid=all_attrs_test,ou=people,dc=example,dc=com | |||
Passed | suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[ou=people,dc=example,dc=com-True-oper_attr_list3-objectClass] | 0.01 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.utils:rfc3673_all_oper_attrs_test.py:158 bound as: uid=all_attrs_test,ou=people,dc=example,dc=com | |||
Passed | suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[uid=all_attrs_test,ou=people,dc=example,dc=com-False-oper_attr_list4-] | 0.05 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.utils:rfc3673_all_oper_attrs_test.py:161 bound as: cn=Directory Manager | |||
Passed | suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[uid=all_attrs_test,ou=people,dc=example,dc=com-False-oper_attr_list4-*] | 0.01 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.utils:rfc3673_all_oper_attrs_test.py:161 bound as: cn=Directory Manager | |||
Passed | suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[uid=all_attrs_test,ou=people,dc=example,dc=com-False-oper_attr_list4-objectClass] | 0.01 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.utils:rfc3673_all_oper_attrs_test.py:161 bound as: cn=Directory Manager | |||
Passed | suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[uid=all_attrs_test,ou=people,dc=example,dc=com-True-oper_attr_list5-] | 0.06 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.utils:rfc3673_all_oper_attrs_test.py:158 bound as: uid=all_attrs_test,ou=people,dc=example,dc=com | |||
Passed | suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[uid=all_attrs_test,ou=people,dc=example,dc=com-True-oper_attr_list5-*] | 0.01 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.utils:rfc3673_all_oper_attrs_test.py:158 bound as: uid=all_attrs_test,ou=people,dc=example,dc=com | |||
Passed | suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[uid=all_attrs_test,ou=people,dc=example,dc=com-True-oper_attr_list5-objectClass] | 0.02 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.utils:rfc3673_all_oper_attrs_test.py:158 bound as: uid=all_attrs_test,ou=people,dc=example,dc=com | |||
Passed | suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[cn=config-False-oper_attr_list6-] | 0.05 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.utils:rfc3673_all_oper_attrs_test.py:161 bound as: cn=Directory Manager | |||
Passed | suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[cn=config-False-oper_attr_list6-*] | 0.02 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.utils:rfc3673_all_oper_attrs_test.py:161 bound as: cn=Directory Manager | |||
Passed | suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[cn=config-False-oper_attr_list6-objectClass] | 0.01 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.utils:rfc3673_all_oper_attrs_test.py:161 bound as: cn=Directory Manager | |||
Passed | suites/filter/schema_validation_test.py::test_filter_validation_config | 0.35 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/filter/schema_validation_test.py::test_filter_validation_enabled | 4.54 | |
No log output captured. | |||
Passed | suites/filter/schema_validation_test.py::test_filter_validation_warn_safe | 0.46 | |
No log output captured. | |||
Passed | suites/filter/schema_validation_test.py::test_filter_validation_warn_unsafe | 0.40 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (| (nsRoleDN=cn=new managed role) (sn=Hall)) (l=sunnyvale))-condition0-cn] | 0.01 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (| (nsRoleDN=cn=new managed role) (sn=Hall)) (l=sunnyvale))-condition1-cn] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (| (nsRoleDN=cn=new managed role) (sn=Hall)) (l=sunnyvale))-condition2-cn] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (| (nsRoleDN=cn=new managed role) (sn=Hall)) (l=sunnyvale))-condition3-cn] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (| (nsRoleDN=cn=new managed role) (sn=Hall)) (l=sunnyvale))-condition4-modifiersName] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (| (nsRoleDN=cn=new managed role) (sn=Hall)) (l=sunnyvale))-condition5-modifyTimestamp] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (| (nsRoleDN=cn=new managed role) (sn=Hall)) (l=sunnyvale))-condition6-modifiersName] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (| (nsRoleDN=cn=new managed role) (sn=Hall)) (l=sunnyvale))-condition7-modifyTimestamp] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (| (nsRoleDN=cn=new managed role) (sn=Hall)) (l=sunnyvale))-condition8-modifiersName] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (| (nsRoleDN=cn=new managed role) (sn=Hall)) (l=sunnyvale))-condition9-modifyTimestamp] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (| (nsRoleDN=cn=new managed role) (sn=Hall)) (l=sunnyvale))-condition10-cn] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (| (nsRoleDN=cn=new managed role) (sn=Hall)) (l=sunnyvale))-condition11-cn] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (| (nsRoleDN=cn=new managed role) (sn=Hall)) (l=sunnyvale))-condition12-modifiersName] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (| (nsRoleDN=cn=new managed role) (sn=Hall)) (l=sunnyvale))-condition13-nsRoleDN] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (| (nsRoleDN=cn=new managed role) (sn=Hall)) (l=sunnyvale))-condition14-cn] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (| (nsRoleDN=cn=new managed role) (sn=Hall)) (l=sunnyvale))-condition15-modifiersName] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (| (nsRoleDN=cn=new managed role) (sn=Hall)) (l=sunnyvale))-condition16-nsRoleDN] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(uid=rjense2)-condition17-mailquota] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(uid=rjense2)-condition18-mailquota] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(uid=rjense2)-condition19-mailquota] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(uid=rjense2)-condition20-mailquota] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(uid=rjense2)-condition21-nsRoleDN] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(nsRoleDN=cn=new managed *)-condition22-cn] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(nsRoleDN=cn=new managed *)-condition23-nsRoleDN] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (nsRoleDN=cn=new managed *) (uid=mtyler))-condition24-mailquota] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (nsRoleDN=cn=new managed *) (uid=mtyler))-condition25-nsRoleDN] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (nsRoleDN=cn=new managed *) (uid=mtyler))-condition26-mailquota] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (nsRoleDN=cn=new managed *) (uid=mtyler))-condition27-modifiersName] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (nsRoleDN=cn=new managed *) (uid=mtyler))-condition28-nsRoleDN] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (nsRoleDN=cn=new managed *) (uid=mtyler))-condition29-nsRoleDN] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (nsRoleDN=cn=new managed *) (uid=mtyler))-condition30-modifiersName] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(postalCode=99999)] | 0.01 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(postalAddress=345 California Av., Mountain View, CA)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(postalCode:2.16.840.1.113730.3.3.2.7.1:=88888)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(postalCode:2.16.840.1.113730.3.3.2.7.1.3:=66666)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(emailclass=vpe*)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(emailclass=*emai*)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(mailquota=*00)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(mailquota=*6*0)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(nsRole=*)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(postalAddress=*)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(emailclass:2.16.840.1.113730.3.3.2.15.1:=>AAA)] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(emailclass:es:=>AAA)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(emailclass:2.16.840.1.113730.3.3.2.15.1.5:=AAA)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(emailclass:2.16.840.1.113730.3.3.2.15.1:=>vpemail)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(emailclass:es:=>vpemail)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(mailquota:2.16.840.1.113730.3.3.2.15.1.1:=900)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(mailquota<=600)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(mailquota>=600)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(nsRole~=cn=new)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(uid=*wal*)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(uid=mw*)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(roomNumber=0312)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(l=Cupertino)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(uid=user1)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(objectclass=inetorgperson)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(l=sunnyvale)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(roomNumber=3924)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(l=*)] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(objectclass=*)] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(mailquota<=900)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(mailquota>=100)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(mailquota:2.16.840.1.113730.3.3.2.18.1.2:=600)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(mailquota:2.16.840.1.113730.3.3.2.18.1.2:=900)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(mailquota:2.16.840.1.113730.3.3.2.18.1:=>=900)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(mailquota:fr:=>=900)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(mailquota:2.16.840.1.113730.3.3.2.18.1:=>=600)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(mailquota:fr:=>=600)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(mailquota:2.16.840.1.113730.3.3.2.18.1.4:=600)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(mailquota:2.16.840.1.113730.3.3.2.18.1:=>=100)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(mailquota:fr:=>=100)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(mailquota:2.16.840.1.113730.3.3.2.18.1.4:=100)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(&(!(nsRole~=cn=new managed))(!(nsRole=cn=new vaddr filtered role,dc=example,dc=com)))] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(uid=*wal*) (nsRole=cn=*another*))] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(!(uid=*wal*))(!(nsRole=cn=*another*)))] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(!(uid=*wal*))(nsRole=cn=*another*))] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(uid=*wal*)(!(nsRole=cn=*another*)))] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(&(uid=*wal*)(|(nsRole~=cn=new managed) (l=Cupertino)))] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(&(uid=*wal*) (l=Cupertino))(&(uid=*wal*) (nsRole~=cn=new managed)))] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(uid=*wal*)(&(nsRole~=cn=new managed) (l=Cupertino)))] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(&(|(uid=*wal*) (nsRole~=cn=new managed))(|(uid=*wal*) (l=Cupertino)))] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(nsRole=cn=*vaddr*) (uid=*wal*))] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(uid=*wal*)(&(nsRole~=cn=new managed) (nsRole=cn=*vaddr*)))] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(uid=*wal*) (nsRole=cn=*vaddr*))] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(nsRole=cn=*vaddr*) (l=*))] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(&(l=Cupertino) (|(uid=*wal*)(&(nsRole~=cn=new managed) (nsRole=cn=*vaddr*))))] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(&(!(l=Cupertino))(!(|(uid=*wal*)(&(nsRole~=cn=new managed) (nsRole=cn=*vaddr*)))))] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(&(!(l=Cupertino))(|(uid=*wal*)(&(nsRole~=cn=new managed) (nsRole=cn=*vaddr*))))] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(&(l=Cupertino)(!(|(uid=*wal*)(&(nsRole~=cn=new managed) (nsRole=cn=*vaddr*)))))] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(l=Cupertino) (|(uid=*wal*)(&(nsRole~=cn=new managed) (nsRole=cn=*vaddr*))))] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(!(l=Cupertino))(!(|(uid=*wal*)(&(nsRole~=cn=new managed) (nsRole=cn=*vaddr*)))))] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(!(l=Cupertino))(|(uid=*wal*)(&(nsRole~=cn=new managed) (nsRole=cn=*vaddr*))))] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(l=Cupertino)(!(|(uid=*wal*)(&(nsRole~=cn=new managed) (nsRole=cn=*vaddr*)))))] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(&(!(uid=user1))(objectclass=inetorgperson))] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(!(uid=user1))(objectclass=inetorgperson))] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(&(!(nsRole=cn=*vaddr*))(l=sunnyvale))] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(!(nsRole=cn=*vaddr*))(l=sunnyvale))] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(&(!(uid=user1))(!(uid=user20))(!(uid=user30))(objectclass=inetorgperson))] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(!(uid=user1))(!(uid=user20))(!(uid=user30))(objectclass=inetorgperson))] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(&(!(nsRole=cn=another vaddr role,dc=example,dc=com))(!(roomNumber=3924))(!(roomNumber=4508))(l=sunnyvale))] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(!(nsRole=cn=another vaddr role,dc=example,dc=com))(!(roomNumber=3924))(!(roomNumber=4508))(l=sunnyvale))] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(&(!(uid=user40))(&(!(uid=user1))(!(uid=user20))(!(uid=user30))(objectclass=inetorgperson)))] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(!(uid=user40))(&(!(uid=user1))(!(uid=user20))(!(uid=user30))(objectclass=inetorgperson)))] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(&(!(roomNumber=2254))(&(!(nsRole=cn=another vaddr role,dc=example,dc=com))(!(roomNumber=3924))(!(roomNumber=4508))(l=sunnyvale)))] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(!(roomNumber=2254))(&(!(nsRole=cn=another vaddr role,dc=example,dc=com))(!(roomNumber=3924))(!(roomNumber=4508))(l=sunnyvale)))] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(&(!(uid=user1))(!(uid=user20))(!(uid=user30)))] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(!(uid=user1))(!(uid=user20))(!(uid=user30)))] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(&(!(nsRole=cn=another vaddr role,dc=example,dc=com))(!(roomNumber=3924))(!(roomNumber=4508)))] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(!(nsRole=cn=another vaddr role,dc=example,dc=com))(!(roomNumber=3924))(!(roomNumber=4508)))] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(& (objectclass=inetorgperson)(!(uid=user1))(!(uid=user20))(!(uid=user30)))] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(| (objectclass=inetorgperson)(!(uid=user1))(!(uid=user20))(!(uid=user30)))] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(&(l=sunnyvale)(!(nsRole=cn=another vaddr role,dc=example,dc=com))(!(roomNumber=3924))(!(roomNumber=4508)))] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(l=sunnyvale)(!(nsRole=cn=another vaddr role,dc=example,dc=com))(!(roomNumber=3924))(!(roomNumber=4508)))] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(!(|(!(l=*))(!(l=sunnyvale))))] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(!(l=*))(!(l=sunnyvale)))] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(&(l=Cupertino) (emailclass=*emai*) (|(nsRole=cn=*vaddr*) (l=*)))] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(&(!(l=Cupertino))(!(emailclass=*emai*))(!(|(nsRole=cn=*vaddr*) (l=*))))] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(&(!(l=Cupertino))(!(emailclass=*emai*))(|(nsRole=cn=*vaddr*) (l=*)))] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(&(!(l=Cupertino))(emailclass=*emai*) (|(nsRole=cn=*vaddr*) (l=*)))] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(&(l=Cupertino)(!(emailclass=*emai*))(|(nsRole=cn=*vaddr*) (l=*)))] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(l=Cupertino) (emailclass=*emai*) (|(nsRole=cn=*vaddr*) (l=*)))] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(!(l=Cupertino))(!(emailclass=*emai*))(!(|(nsRole=cn=*vaddr*) (l=*))))] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(!(l=Cupertino))(!(emailclass=*emai*))(|(nsRole=cn=*vaddr*) (l=*)))] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(!(l=Cupertino))(emailclass=*emai*)(!(|(nsRole=cn=*vaddr*) (l=*))))] | 0.07 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(!(l=Cupertino))(emailclass=*emai*) (|(nsRole=cn=*vaddr*) (l=*)))] | 0.03 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(l=Cupertino)(!(emailclass=*emai*))(!(|(nsRole=cn=*vaddr*) (l=*))))] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(l=Cupertino)(!(emailclass=*emai*))(|(nsRole=cn=*vaddr*) (l=*)))] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(l=Cupertino) (emailclass=*emai*)(!(|(nsRole=cn=*vaddr*) (l=*))))] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(postalCode:de:==77777)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(fred=*)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(emailclass:2.16.840.1.113730.3.3.2.15.1.5:=vpemail)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(emailclass:2.16.840.1.113730.3.3.2.15.1:=<1)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(emailclass:es:=<1)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(emailclass:2.16.840.1.113730.3.3.2.15.1.1:=1)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(emailclass:2.16.840.1.113730.3.3.2.15.1:=<vpemail)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(emailclass:es:=<vpemail)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(emailclass:2.16.840.1.113730.3.3.2.15.1.1:=vpemail)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(mailquota:2.16.840.1.113730.3.3.2.15.1:=<900)] | 0.02 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(mailquota:es:=<900)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(mailquota<=100)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(mailquota>=900)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(nsRole~=cn=new managed)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(nsRole=cn=new vaddr filtered role,dc=example,dc=com)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(nsRole=cn=*another*)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(nsRole=cn=*vaddr*)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(uid=user20)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(uid=user30)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(nsRole=cn=another vaddr role,dc=example,dc=com)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(roomNumber=4508)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(uid=user40)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(roomNumber=2254)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(mailquota:2.16.840.1.113730.3.3.2.18.1:=<=100)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(mailquota:fr:=<=100)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(mailquota:2.16.840.1.113730.3.3.2.18.1.2:=100)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(mailquota:2.16.840.1.113730.3.3.2.18.1:=<=600)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(mailquota:fr:=<=600)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(mailquota:2.16.840.1.113730.3.3.2.18.1:=<=900)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(mailquota:fr:=<=900)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(mailquota:2.16.840.1.113730.3.3.2.18.1.4:=900)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(Description=This is the special \2a attribute value)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(Description=*\2a*)] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(&(nsRole~=cn=new managed) (nsRole=cn=new vaddr filtered role,dc=example,dc=com))] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(&(!(nsRole~=cn=new managed)) (nsRole=cn=new vaddr filtered role,dc=example,dc=com))] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(&(nsRole~=cn=new managed)(!(nsRole=cn=new vaddr filtered role,dc=example,dc=com)))] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(&(uid=*wal*) (nsRole=cn=*vaddr*))] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(&(nsRole=cn=*vaddr*) (uid=*wal*))] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(&(!(l=Cupertino))(emailclass=*emai*)(!(|(nsRole=cn=*vaddr*) (l=*))))] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(&(l=Cupertino)(!(emailclass=*emai*))(!(|(nsRole=cn=*vaddr*) (l=*))))] | 0.01 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(&(l=Cupertino) (emailclass=*emai*)(!(|(nsRole=cn=*vaddr*) (l=*))))] | 0.01 | |
No log output captured. | |||
Passed | suites/fourwaymmr/fourwaymmr_test.py::test_verify_trees | 3.05 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master3 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39003, 'ldap-secureport': 63703, 'server-id': 'master3', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master4 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39004, 'ldap-secureport': 63704, 'server-id': 'master4', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect a7c1802b-4f99-48b6-b2d8-a56c715fb553 / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 0893cdcc-0367-414e-9832-da85965b5198 / got description=a7c1802b-4f99-48b6-b2d8-a56c715fb553) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master3 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 047757d2-2a6a-4caf-b36c-95775404041e / got description=0893cdcc-0367-414e-9832-da85965b5198) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 38a46dfb-0091-4f62-a790-1062a28485f8 / got description=047757d2-2a6a-4caf-b36c-95775404041e) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master4 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 075d772c-b8bd-43b0-96d3-33548a244d70 / got description=38a46dfb-0091-4f62-a790-1062a28485f8) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect b966aa8a-da66-4aff-8be6-cf29cacd3509 / got description=075d772c-b8bd-43b0-96d3-33548a244d70) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... 
[32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master3 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master4 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master3 ... [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is was created [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master4 ... [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is was created [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master3 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master3 to master2 ... [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master3 to master4 ... [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is was created [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master4 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master4 to master2 ... [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master4 to master3 ... 
[32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is was created -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect ef0a2204-b1ea-4d48-891b-020f75722898 / got description=b966aa8a-da66-4aff-8be6-cf29cacd3509) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 40ee16e4-e348-4fd2-941f-381fe278c16d / got description=ef0a2204-b1ea-4d48-891b-020f75722898) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect f32bce2c-3a0e-4394-8d86-5737bbf6afad / got description=40ee16e4-e348-4fd2-941f-381fe278c16d) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is working | |||
Passed | suites/fourwaymmr/fourwaymmr_test.py::test_sync_through_to_all_4_masters | 3.08 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 55c913b5-3191-4bfc-932c-e8dc16666c89 / got description=f32bce2c-3a0e-4394-8d86-5737bbf6afad) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 388a92c8-2865-4f9c-a77c-e75e52c899e7 / got description=55c913b5-3191-4bfc-932c-e8dc16666c89) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 5611f669-b906-4da6-bb73-1313b2a31551 / got description=388a92c8-2865-4f9c-a77c-e75e52c899e7) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is working | |||
Passed | suites/fourwaymmr/fourwaymmr_test.py::test_modify_some_data_in_m3 | 8.56 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect ce8b6bea-e042-4bad-bf5e-af7163fe141a / got description=5611f669-b906-4da6-bb73-1313b2a31551) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect eeb99a55-752b-45f4-aed4-578b26beb913 / got description=ce8b6bea-e042-4bad-bf5e-af7163fe141a) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect d3de8b23-cb2b-4b2f-b7ef-bf1e12fe51aa / got description=eeb99a55-752b-45f4-aed4-578b26beb913) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is working | |||
Passed | suites/fourwaymmr/fourwaymmr_test.py::test_delete_a_few_entries_in_m4 | 4.24 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 27ed206b-a4b1-4294-b38e-f24f64a1feb6 / got description=d3de8b23-cb2b-4b2f-b7ef-bf1e12fe51aa) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect f633fc30-40f7-4647-9980-7a7a731f4f6d / got description=27ed206b-a4b1-4294-b38e-f24f64a1feb6) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 49a66064-8721-44e6-8e1d-76974e9d2987 / got description=f633fc30-40f7-4647-9980-7a7a731f4f6d) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 82e5bdb2-9c93-49e0-a087-c976499d3238 / got description=49a66064-8721-44e6-8e1d-76974e9d2987) [32mINFO [0m lib389.replica:replica.py:2496 
SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is working | |||
Passed | suites/fourwaymmr/fourwaymmr_test.py::test_replicated_multivalued_entries | 2.24 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect ad84e20e-e754-46e2-8b10-b49cd06e42c6 / got description=82e5bdb2-9c93-49e0-a087-c976499d3238) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect ad84e20e-e754-46e2-8b10-b49cd06e42c6 / got description=82e5bdb2-9c93-49e0-a087-c976499d3238) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working | |||
Passed | suites/fourwaymmr/fourwaymmr_test.py::test_bad_replication_agreement | 26.93 | |
No log output captured. | |||
Passed | suites/fourwaymmr/fourwaymmr_test.py::test_nsds5replicaenabled_verify | 72.36 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c1cb9212-0d05-4760-8752-243bf7f05155 / got description=ad84e20e-e754-46e2-8b10-b49cd06e42c6) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c1cb9212-0d05-4760-8752-243bf7f05155 / got description=ad84e20e-e754-46e2-8b10-b49cd06e42c6) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c1cb9212-0d05-4760-8752-243bf7f05155 / got description=ad84e20e-e754-46e2-8b10-b49cd06e42c6) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c1cb9212-0d05-4760-8752-243bf7f05155 / got description=ad84e20e-e754-46e2-8b10-b49cd06e42c6) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c1cb9212-0d05-4760-8752-243bf7f05155 / got description=ad84e20e-e754-46e2-8b10-b49cd06e42c6) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c1cb9212-0d05-4760-8752-243bf7f05155 / got description=ad84e20e-e754-46e2-8b10-b49cd06e42c6) [32mINFO [0m lib389.replica:replica.py:2498 Retry: 
Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c1cb9212-0d05-4760-8752-243bf7f05155 / got description=ad84e20e-e754-46e2-8b10-b49cd06e42c6) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c1cb9212-0d05-4760-8752-243bf7f05155 / got description=ad84e20e-e754-46e2-8b10-b49cd06e42c6) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c1cb9212-0d05-4760-8752-243bf7f05155 / got description=ad84e20e-e754-46e2-8b10-b49cd06e42c6) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c1cb9212-0d05-4760-8752-243bf7f05155 / got description=ad84e20e-e754-46e2-8b10-b49cd06e42c6) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c1cb9212-0d05-4760-8752-243bf7f05155 / got description=ad84e20e-e754-46e2-8b10-b49cd06e42c6) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c1cb9212-0d05-4760-8752-243bf7f05155 / got description=ad84e20e-e754-46e2-8b10-b49cd06e42c6) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to 
ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c1cb9212-0d05-4760-8752-243bf7f05155 / got description=ad84e20e-e754-46e2-8b10-b49cd06e42c6) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c1cb9212-0d05-4760-8752-243bf7f05155 / got description=ad84e20e-e754-46e2-8b10-b49cd06e42c6) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c1cb9212-0d05-4760-8752-243bf7f05155 / got description=ad84e20e-e754-46e2-8b10-b49cd06e42c6) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c1cb9212-0d05-4760-8752-243bf7f05155 / got description=ad84e20e-e754-46e2-8b10-b49cd06e42c6) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c1cb9212-0d05-4760-8752-243bf7f05155 / got description=ad84e20e-e754-46e2-8b10-b49cd06e42c6) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c1cb9212-0d05-4760-8752-243bf7f05155 / got description=ad84e20e-e754-46e2-8b10-b49cd06e42c6) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c1cb9212-0d05-4760-8752-243bf7f05155 / got 
description=ad84e20e-e754-46e2-8b10-b49cd06e42c6) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c1cb9212-0d05-4760-8752-243bf7f05155 / got description=ad84e20e-e754-46e2-8b10-b49cd06e42c6) [32mINFO [0m lib389.replica:replica.py:2500 FAIL: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c1cb9212-0d05-4760-8752-243bf7f05155 / got description=ad84e20e-e754-46e2-8b10-b49cd06e42c6) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect a0c4a5fe-8b93-4e1e-9e2a-72ec94149974 / got description=ad84e20e-e754-46e2-8b10-b49cd06e42c6) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect dc226b03-7d31-42cd-b129-840e894a67ef / got description=a0c4a5fe-8b93-4e1e-9e2a-72ec94149974) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect dc226b03-7d31-42cd-b129-840e894a67ef / got description=a0c4a5fe-8b93-4e1e-9e2a-72ec94149974) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to 
ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect dc226b03-7d31-42cd-b129-840e894a67ef / got description=a0c4a5fe-8b93-4e1e-9e2a-72ec94149974) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect dc226b03-7d31-42cd-b129-840e894a67ef / got description=a0c4a5fe-8b93-4e1e-9e2a-72ec94149974) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect dc226b03-7d31-42cd-b129-840e894a67ef / got description=a0c4a5fe-8b93-4e1e-9e2a-72ec94149974) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect dc226b03-7d31-42cd-b129-840e894a67ef / got description=a0c4a5fe-8b93-4e1e-9e2a-72ec94149974) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect dc226b03-7d31-42cd-b129-840e894a67ef / got description=a0c4a5fe-8b93-4e1e-9e2a-72ec94149974) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect dc226b03-7d31-42cd-b129-840e894a67ef / got description=a0c4a5fe-8b93-4e1e-9e2a-72ec94149974) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect dc226b03-7d31-42cd-b129-840e894a67ef / got 
description=a0c4a5fe-8b93-4e1e-9e2a-72ec94149974) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect dc226b03-7d31-42cd-b129-840e894a67ef / got description=a0c4a5fe-8b93-4e1e-9e2a-72ec94149974) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect dc226b03-7d31-42cd-b129-840e894a67ef / got description=a0c4a5fe-8b93-4e1e-9e2a-72ec94149974) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect dc226b03-7d31-42cd-b129-840e894a67ef / got description=a0c4a5fe-8b93-4e1e-9e2a-72ec94149974) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect dc226b03-7d31-42cd-b129-840e894a67ef / got description=a0c4a5fe-8b93-4e1e-9e2a-72ec94149974) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect dc226b03-7d31-42cd-b129-840e894a67ef / got description=a0c4a5fe-8b93-4e1e-9e2a-72ec94149974) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect dc226b03-7d31-42cd-b129-840e894a67ef / got description=a0c4a5fe-8b93-4e1e-9e2a-72ec94149974) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from 
ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect dc226b03-7d31-42cd-b129-840e894a67ef / got description=a0c4a5fe-8b93-4e1e-9e2a-72ec94149974) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect dc226b03-7d31-42cd-b129-840e894a67ef / got description=a0c4a5fe-8b93-4e1e-9e2a-72ec94149974) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect dc226b03-7d31-42cd-b129-840e894a67ef / got description=a0c4a5fe-8b93-4e1e-9e2a-72ec94149974) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect dc226b03-7d31-42cd-b129-840e894a67ef / got description=a0c4a5fe-8b93-4e1e-9e2a-72ec94149974) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect dc226b03-7d31-42cd-b129-840e894a67ef / got description=a0c4a5fe-8b93-4e1e-9e2a-72ec94149974) [32mINFO [0m lib389.replica:replica.py:2500 FAIL: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect dc226b03-7d31-42cd-b129-840e894a67ef / got description=a0c4a5fe-8b93-4e1e-9e2a-72ec94149974) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT 
working (expect 7034368b-43ef-4573-8793-b00215ce2a81 / got description=a0c4a5fe-8b93-4e1e-9e2a-72ec94149974) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 86750e14-f533-4098-9a81-8dbd9495dfa9 / got description=7034368b-43ef-4573-8793-b00215ce2a81) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 86750e14-f533-4098-9a81-8dbd9495dfa9 / got description=7034368b-43ef-4573-8793-b00215ce2a81) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 86750e14-f533-4098-9a81-8dbd9495dfa9 / got description=7034368b-43ef-4573-8793-b00215ce2a81) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 86750e14-f533-4098-9a81-8dbd9495dfa9 / got description=7034368b-43ef-4573-8793-b00215ce2a81) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 86750e14-f533-4098-9a81-8dbd9495dfa9 / got description=7034368b-43ef-4573-8793-b00215ce2a81) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from 
ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 86750e14-f533-4098-9a81-8dbd9495dfa9 / got description=7034368b-43ef-4573-8793-b00215ce2a81) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 86750e14-f533-4098-9a81-8dbd9495dfa9 / got description=7034368b-43ef-4573-8793-b00215ce2a81) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 86750e14-f533-4098-9a81-8dbd9495dfa9 / got description=7034368b-43ef-4573-8793-b00215ce2a81) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 86750e14-f533-4098-9a81-8dbd9495dfa9 / got description=7034368b-43ef-4573-8793-b00215ce2a81) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 86750e14-f533-4098-9a81-8dbd9495dfa9 / got description=7034368b-43ef-4573-8793-b00215ce2a81) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 86750e14-f533-4098-9a81-8dbd9495dfa9 / got description=7034368b-43ef-4573-8793-b00215ce2a81) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT 
working (expect 86750e14-f533-4098-9a81-8dbd9495dfa9 / got description=7034368b-43ef-4573-8793-b00215ce2a81) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 86750e14-f533-4098-9a81-8dbd9495dfa9 / got description=7034368b-43ef-4573-8793-b00215ce2a81) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 86750e14-f533-4098-9a81-8dbd9495dfa9 / got description=7034368b-43ef-4573-8793-b00215ce2a81) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 86750e14-f533-4098-9a81-8dbd9495dfa9 / got description=7034368b-43ef-4573-8793-b00215ce2a81) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 86750e14-f533-4098-9a81-8dbd9495dfa9 / got description=7034368b-43ef-4573-8793-b00215ce2a81) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 86750e14-f533-4098-9a81-8dbd9495dfa9 / got description=7034368b-43ef-4573-8793-b00215ce2a81) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 86750e14-f533-4098-9a81-8dbd9495dfa9 / got description=7034368b-43ef-4573-8793-b00215ce2a81) [32mINFO [0m 
lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 86750e14-f533-4098-9a81-8dbd9495dfa9 / got description=7034368b-43ef-4573-8793-b00215ce2a81) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 86750e14-f533-4098-9a81-8dbd9495dfa9 / got description=7034368b-43ef-4573-8793-b00215ce2a81) [32mINFO [0m lib389.replica:replica.py:2500 FAIL: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 86750e14-f533-4098-9a81-8dbd9495dfa9 / got description=7034368b-43ef-4573-8793-b00215ce2a81) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 71b13263-103f-4fb8-a3a3-7f4ef9d9980a / got description=86750e14-f533-4098-9a81-8dbd9495dfa9) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working | |||
Passed | suites/fourwaymmr/fourwaymmr_test.py::test_create_an_entry_on_the_supplier | 4.19 | |
No log output captured. | |||
Passed | suites/fourwaymmr/fourwaymmr_test.py::test_bob_acceptance_tests | 11.49 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c3214a07-13ad-452b-884d-80f7d06f722b / got description=71b13263-103f-4fb8-a3a3-7f4ef9d9980a) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working | |||
Passed | suites/fractional/fractional_test.py::test_fractional_agreements | 3.37 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for consumer1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39201, 'ldap-secureport': 63901, 'server-id': 'consumer1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for consumer2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39202, 'ldap-secureport': 63902, 'server-id': 'consumer2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 48c66c30-15d3-4ab4-8be8-e6f5e2833e41 / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 5a3c3f5e-ce6d-42a1-b457-2bf910545a71 / got description=48c66c30-15d3-4ab4-8be8-e6f5e2833e41) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... 
[32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists [32mINFO [0m lib389.topologies:topologies.py:169 Joining consumer consumer1 from master1 ... [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 6f2a5384-c25a-4a45-b39d-3422de14e86d / got description=5a3c3f5e-ce6d-42a1-b457-2bf910545a71) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is working [32mINFO [0m lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 [32mINFO [0m lib389.topologies:topologies.py:169 Joining consumer consumer2 from master1 ... 
[32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39202 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39202 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39202 is NOT working (expect 56bbb8ff-730b-416f-9f2f-a68ee810c263 / got description=6f2a5384-c25a-4a45-b39d-3422de14e86d) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39202 is working [32mINFO [0m lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39202 [32mINFO [0m lib389.topologies:topologies.py:174 Ensuring consumer consumer1 from master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 already exists [32mINFO [0m lib389.topologies:topologies.py:174 Ensuring consumer consumer2 from master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39202 already exists [32mINFO [0m lib389.topologies:topologies.py:174 Ensuring consumer consumer1 from master2 ... 
[32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is was created [32mINFO [0m lib389.topologies:topologies.py:174 Ensuring consumer consumer2 from master2 ... [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39202 is was created -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 4cbb8706-f734-4f02-83b8-e019ec77b348 / got description=56bbb8ff-730b-416f-9f2f-a68ee810c263) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect f5ecf8af-02e5-4ff6-81e1-f5cd02932a42 / got description=4cbb8706-f734-4f02-83b8-e019ec77b348) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39202 is NOT working (expect 03732dc2-a7b7-45f9-9657-ad14b118628e / got description=f5ecf8af-02e5-4ff6-81e1-f5cd02932a42) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: 
Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39202 is working | |||
Passed | suites/fractional/fractional_test.py::test_read_only_consumer | 0.11 | |
No log output captured. | |||
Passed | suites/fractional/fractional_test.py::test_read_write_supplier | 3.09 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 9243a9a5-c45d-4fc2-8915-755240ce0e1e / got description=03732dc2-a7b7-45f9-9657-ad14b118628e) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 777bd855-1d46-4888-8053-413b7506b13d / got description=9243a9a5-c45d-4fc2-8915-755240ce0e1e) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39202 is NOT working (expect f3478304-99a9-45cb-954f-2f0e807b2dfe / got description=777bd855-1d46-4888-8053-413b7506b13d) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39202 is working | |||
Passed | suites/fractional/fractional_test.py::test_filtered_attributes | 3.36 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 1d6fb96f-cb71-418e-abf8-47de99ca4ea0 / got description=f3478304-99a9-45cb-954f-2f0e807b2dfe) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 3de7abd4-587e-4694-bf5c-8b77a1c6c7f8 / got description=1d6fb96f-cb71-418e-abf8-47de99ca4ea0) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39202 is NOT working (expect 5e0250d2-20c5-41c6-b64d-fd9f0d465bea / got description=3de7abd4-587e-4694-bf5c-8b77a1c6c7f8) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39202 is working | |||
Passed | suites/fractional/fractional_test.py::test_fewer_changes_in_single_operation | 9.28 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect a6c558a5-3f63-4901-b863-595595727a75 / got description=5e0250d2-20c5-41c6-b64d-fd9f0d465bea) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 7e02c783-b4f2-479c-ba70-00a0879fea75 / got description=a6c558a5-3f63-4901-b863-595595727a75) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39202 is NOT working (expect 7e8dad96-abc8-4aff-ad90-a834fbed4647 / got description=7e02c783-b4f2-479c-ba70-00a0879fea75) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39202 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect f4f0e969-d99b-42ad-9f3e-4aa0872155ad / got description=7e8dad96-abc8-4aff-ad90-a834fbed4647) [32mINFO [0m lib389.replica:replica.py:2496 
SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect feaeb4ad-691e-4766-a7f4-17aa92eac757 / got description=f4f0e969-d99b-42ad-9f3e-4aa0872155ad) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39202 is NOT working (expect 94cd1878-8fbb-4f22-bbed-3f3e63e2b069 / got description=feaeb4ad-691e-4766-a7f4-17aa92eac757) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39202 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 3f56f485-2966-4ad4-b711-b1fda054c676 / got description=94cd1878-8fbb-4f22-bbed-3f3e63e2b069) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect e4cf7aa6-81b5-42a7-8794-a6b7b67faa32 / got 
description=3f56f485-2966-4ad4-b711-b1fda054c676) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39202 is NOT working (expect bd31338b-f8b0-4896-9e40-1c825cabc66c / got description=e4cf7aa6-81b5-42a7-8794-a6b7b67faa32) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39202 is working | |||
Passed | suites/fractional/fractional_test.py::test_newly_added_attribute_nsds5replicatedattributelisttotal | 5.12 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect a344569b-9170-4cdf-96de-a07b1340a474 / got description=bd31338b-f8b0-4896-9e40-1c825cabc66c) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect a344569b-9170-4cdf-96de-a07b1340a474 / got description=bd31338b-f8b0-4896-9e40-1c825cabc66c) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect a344569b-9170-4cdf-96de-a07b1340a474 / got description=bd31338b-f8b0-4896-9e40-1c825cabc66c) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 45a7a3b2-67c3-4fe2-bfde-a2a5e32492d9 / got description=a344569b-9170-4cdf-96de-a07b1340a474) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39202 is NOT working (expect b6510551-b178-4b1c-84ab-5ce1cd57db5f / 
got description=45a7a3b2-67c3-4fe2-bfde-a2a5e32492d9) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39202 is working | |||
Passed | suites/fractional/fractional_test.py::test_attribute_nsds5replicatedattributelisttotal | 21.60 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect fc5beb7d-fe1e-456b-8169-a9f3eded161c / got description=b6510551-b178-4b1c-84ab-5ce1cd57db5f) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 52351304-2b8c-4896-9a74-76604cda9072 / got description=fc5beb7d-fe1e-456b-8169-a9f3eded161c) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39202 is NOT working (expect 6c062d5d-ad67-4260-a6ad-4b37a1c709bf / got description=52351304-2b8c-4896-9a74-76604cda9072) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39202 is working | |||
Passed | suites/fractional/fractional_test.py::test_implicit_replication_of_password_policy | 12.54 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 31dd3180-3b3c-4983-af27-6457bb154912 / got description=6c062d5d-ad67-4260-a6ad-4b37a1c709bf) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect c175c828-cd11-403c-a905-e6a7198c35f0 / got description=31dd3180-3b3c-4983-af27-6457bb154912) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39202 is NOT working (expect 4d5653e4-5e53-47c7-86e0-807ba32bed88 / got description=c175c828-cd11-403c-a905-e6a7198c35f0) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39202 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 28f7700a-1b6e-4c49-8b77-58355c7da1cc / got description=4d5653e4-5e53-47c7-86e0-807ba32bed88) [32mINFO [0m lib389.replica:replica.py:2498 Retry: 
Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 28f7700a-1b6e-4c49-8b77-58355c7da1cc / got description=4d5653e4-5e53-47c7-86e0-807ba32bed88) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 28f7700a-1b6e-4c49-8b77-58355c7da1cc / got description=4d5653e4-5e53-47c7-86e0-807ba32bed88) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 28f7700a-1b6e-4c49-8b77-58355c7da1cc / got description=4d5653e4-5e53-47c7-86e0-807ba32bed88) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 7459896c-cf50-4c9a-ab91-27aed5cd4575 / got description=28f7700a-1b6e-4c49-8b77-58355c7da1cc) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39202 is NOT working (expect 29aa3561-5da1-4c10-9eec-98613622c21a / got description=7459896c-cf50-4c9a-ab91-27aed5cd4575) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from 
ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39202 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 644e8135-cd5b-4fbd-9fa3-4ee214f4b277 / got description=28f7700a-1b6e-4c49-8b77-58355c7da1cc) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect bc81ab4d-7f2f-49eb-8e6e-dd70d8543c91 / got description=644e8135-cd5b-4fbd-9fa3-4ee214f4b277) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39202 is NOT working (expect 9c523aff-fa37-4ddf-9e96-2a6b1c34c1f5 / got description=bc81ab4d-7f2f-49eb-8e6e-dd70d8543c91) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39202 is working | |||
Passed | suites/get_effective_rights/acceptance_test.py::test_group_aci_entry_exists | 0.29 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m tests.suites.get_effective_rights.acceptance_test:acceptance_test.py:30 Adding user testuser -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.get_effective_rights.acceptance_test:acceptance_test.py:57 Adding group group1 [32mINFO [0m tests.suites.get_effective_rights.acceptance_test:acceptance_test.py:70 Add an ACI granting add access to a user matching the groupdn [32mINFO [0m lib389:acceptance_test.py:79 dn: uid=testuser,dc=example,dc=com [32mINFO [0m lib389:acceptance_test.py:81 ######## entryLevelRights: b'vadn' | |||
Passed | suites/get_effective_rights/acceptance_test.py::test_group_aci_template_entry | 0.01 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.get_effective_rights.acceptance_test:acceptance_test.py:105 Add an ACI granting add access to a user matching the userdn [32mINFO [0m lib389:acceptance_test.py:115 dn: cn=template_person_objectclass,dc=example,dc=com [32mINFO [0m lib389:acceptance_test.py:117 ######## entryLevelRights: b'vadn' [32mINFO [0m lib389:acceptance_test.py:120 dn: cn=template_groupofnames_objectclass,dc=example,dc=com [32mINFO [0m lib389:acceptance_test.py:122 ######## entryLevelRights: b'none' | |||
Passed | suites/gssapi/simple_gssapi_test.py::test_invalid_sasl_map | 0.35 | |
No log output captured. | |||
Passed | suites/gssapi/simple_gssapi_test.py::test_missing_user | 1.61 | |
------------------------------Captured stdout call------------------------------ Authenticating as principal testuser/admin@HOSTED.UPSHIFT.RDU2.REDHAT.COM with password. Principal "doesnotexist@HOSTED.UPSHIFT.RDU2.REDHAT.COM" created. Authenticating as principal testuser/admin@HOSTED.UPSHIFT.RDU2.REDHAT.COM with password. K/M@HOSTED.UPSHIFT.RDU2.REDHAT.COM doesnotexist@HOSTED.UPSHIFT.RDU2.REDHAT.COM kadmin/admin@HOSTED.UPSHIFT.RDU2.REDHAT.COM kadmin/changepw@HOSTED.UPSHIFT.RDU2.REDHAT.COM kadmin/ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com@HOSTED.UPSHIFT.RDU2.REDHAT.COM kiprop/ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com@HOSTED.UPSHIFT.RDU2.REDHAT.COM krbtgt/HOSTED.UPSHIFT.RDU2.REDHAT.COM@HOSTED.UPSHIFT.RDU2.REDHAT.COM ldap/ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com@HOSTED.UPSHIFT.RDU2.REDHAT.COM testuser@HOSTED.UPSHIFT.RDU2.REDHAT.COM Authenticating as principal testuser/admin@HOSTED.UPSHIFT.RDU2.REDHAT.COM with password. Entry for principal doesnotexist@HOSTED.UPSHIFT.RDU2.REDHAT.COM with kvno 2, encryption type aes256-cts-hmac-sha1-96 added to keytab WRFILE:/tmp/doesnotexist.keytab. Entry for principal doesnotexist@HOSTED.UPSHIFT.RDU2.REDHAT.COM with kvno 2, encryption type aes128-cts-hmac-sha1-96 added to keytab WRFILE:/tmp/doesnotexist.keytab. ------------------------------Captured stderr call------------------------------ No policy specified for doesnotexist@HOSTED.UPSHIFT.RDU2.REDHAT.COM; defaulting to no policy | |||
Passed | suites/gssapi/simple_gssapi_test.py::test_rejected_mech | 0.34 | |
No log output captured. | |||
Passed | suites/gssapi_repl/gssapi_repl_test.py::test_gssapi_repl | 0.00 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect df099093-dea7-47ee-b536-60aec897d8d0 / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 
Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 2165b49f-483e-4c47-99d4-22a64973a06c / got description=df099093-dea7-47ee-b536-60aec897d8d0) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists | |||
Passed | suites/healthcheck/health_config_test.py::test_healthcheck_logging_format_should_be_revised | 0.57 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... [32mINFO [0m LogCapture:health.py:99 Checking config:hr_timestamp ... [32mINFO [0m LogCapture:health.py:99 Checking config:passwordscheme ... [32mINFO [0m LogCapture:health.py:99 Checking refint:attr_indexes ... [32mINFO [0m LogCapture:health.py:99 Checking refint:update_delay ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:mappingtree ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:search ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... [32mINFO [0m LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... [32mINFO [0m LogCapture:health.py:99 Checking logs:notes ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:119 1 Issue found! Generating report ... 
[32mINFO [0m LogCapture:health.py:45 [1] DS Lint Error: DSCLE0001 [32mINFO [0m LogCapture:health.py:46 -------------------------------------------------------------------------------- [32mINFO [0m LogCapture:health.py:47 Severity: LOW [32mINFO [0m LogCapture:health.py:49 Check: config:hr_timestamp [32mINFO [0m LogCapture:health.py:50 Affects: [32mINFO [0m LogCapture:health.py:52 -- cn=config [32mINFO [0m LogCapture:health.py:53 Details: [32mINFO [0m LogCapture:health.py:54 ----------- [32mINFO [0m LogCapture:health.py:55 nsslapd-logging-hr-timestamps-enabled changes the log format in directory server from [07/Jun/2017:17:15:58 +1000] to [07/Jun/2017:17:15:58.716117312 +1000] This actually provides a performance improvement. Additionally, this setting will be removed in a future release. [32mINFO [0m LogCapture:health.py:56 Resolution: [32mINFO [0m LogCapture:health.py:57 ----------- [32mINFO [0m LogCapture:health.py:58 Set nsslapd-logging-hr-timestamps-enabled to on. You can use 'dsconf' to set this attribute. Here is an example: # dsconf slapd-standalone1 config replace nsslapd-logging-hr-timestamps-enabled=on [32mINFO [0m LogCapture:health.py:124 ===== End Of Report (1 Issue found) ===== [32mINFO [0m LogCapture:health.py:126 [ { "dsle": "DSCLE0001", "severity": "LOW", "description": "Different log timestamp format.", "items": [ "cn=config" ], "detail": "nsslapd-logging-hr-timestamps-enabled changes the log format in directory server from\n\n[07/Jun/2017:17:15:58 +1000]\n\nto\n\n[07/Jun/2017:17:15:58.716117312 +1000]\n\nThis actually provides a performance improvement. Additionally, this setting will be\nremoved in a future release.\n", "fix": "Set nsslapd-logging-hr-timestamps-enabled to on.\nYou can use 'dsconf' to set this attribute. Here is an example:\n\n # dsconf slapd-standalone1 config replace nsslapd-logging-hr-timestamps-enabled=on", "check": "config:hr_timestamp" } ] [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... 
[32mINFO [0m LogCapture:health.py:99 Checking config:hr_timestamp ... [32mINFO [0m LogCapture:health.py:99 Checking config:passwordscheme ... [32mINFO [0m LogCapture:health.py:99 Checking refint:attr_indexes ... [32mINFO [0m LogCapture:health.py:99 Checking refint:update_delay ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:mappingtree ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:search ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... [32mINFO [0m LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... [32mINFO [0m LogCapture:health.py:99 Checking logs:notes ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:111 No issues found. [32mINFO [0m LogCapture:health.py:113 [] | |||
Passed | suites/healthcheck/health_config_test.py::test_healthcheck_RI_plugin_is_misconfigured | 1.38 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... [32mINFO [0m LogCapture:health.py:99 Checking config:hr_timestamp ... [32mINFO [0m LogCapture:health.py:99 Checking config:passwordscheme ... [32mINFO [0m LogCapture:health.py:99 Checking refint:attr_indexes ... [32mINFO [0m LogCapture:health.py:99 Checking refint:update_delay ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:mappingtree ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:search ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... [32mINFO [0m LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... [32mINFO [0m LogCapture:health.py:99 Checking logs:notes ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:119 1 Issue found! Generating report ... [32mINFO [0m LogCapture:health.py:45 [1] DS Lint Error: DSRILE0001 [32mINFO [0m LogCapture:health.py:46 -------------------------------------------------------------------------------- [32mINFO [0m LogCapture:health.py:47 Severity: LOW [32mINFO [0m LogCapture:health.py:49 Check: refint:update_delay [32mINFO [0m LogCapture:health.py:50 Affects: [32mINFO [0m LogCapture:health.py:52 -- cn=referential integrity postoperation,cn=plugins,cn=config [32mINFO [0m LogCapture:health.py:53 Details: [32mINFO [0m LogCapture:health.py:54 ----------- [32mINFO [0m LogCapture:health.py:55 The referential integrity plugin has an asynchronous processing mode. This is controlled by the update-delay flag. When this value is 0, referential integrity plugin processes these changes inside of the operation that modified the entry - ie these are synchronous. However, when this is > 0, these are performed asynchronously. 
This leads to only having referint enabled on one master in MMR to prevent replication conflicts and loops. Additionally, because these are performed in the background these updates may cause spurious update delays to your server by batching changes rather than smaller updates during sync processing. We advise that you set this value to 0, and enable referint on all masters as it provides a more predictable behaviour. [32mINFO [0m LogCapture:health.py:56 Resolution: [32mINFO [0m LogCapture:health.py:57 ----------- [32mINFO [0m LogCapture:health.py:58 Set referint-update-delay to 0. You can use 'dsconf' to set this value. Here is an example: # dsconf slapd-standalone1 plugin referential-integrity set --update-delay 0 You must restart the Directory Server for this change to take effect. [32mINFO [0m LogCapture:health.py:124 ===== End Of Report (1 Issue found) ===== [32mINFO [0m LogCapture:health.py:126 [ { "dsle": "DSRILE0001", "severity": "LOW", "description": "Referential integrity plugin may be slower.", "items": [ "cn=referential integrity postoperation,cn=plugins,cn=config" ], "detail": "The referential integrity plugin has an asynchronous processing mode.\nThis is controlled by the update-delay flag. When this value is 0, referential\nintegrity plugin processes these changes inside of the operation that modified\nthe entry - ie these are synchronous.\n\nHowever, when this is > 0, these are performed asynchronously.\n\nThis leads to only having referint enabled on one master in MMR to prevent replication conflicts and loops.\nAdditionally, because these are performed in the background these updates may cause spurious update\ndelays to your server by batching changes rather than smaller updates during sync processing.\n\nWe advise that you set this value to 0, and enable referint on all masters as it provides a more predictable behaviour.\n", "fix": "Set referint-update-delay to 0.\n\nYou can use 'dsconf' to set this value. 
Here is an example:\n\n # dsconf slapd-standalone1 plugin referential-integrity set --update-delay 0\n\nYou must restart the Directory Server for this change to take effect.", "check": "refint:update_delay" } ] [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... [32mINFO [0m LogCapture:health.py:99 Checking config:hr_timestamp ... [32mINFO [0m LogCapture:health.py:99 Checking config:passwordscheme ... [32mINFO [0m LogCapture:health.py:99 Checking refint:attr_indexes ... [32mINFO [0m LogCapture:health.py:99 Checking refint:update_delay ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:mappingtree ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:search ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... [32mINFO [0m LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... [32mINFO [0m LogCapture:health.py:99 Checking logs:notes ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:111 No issues found. [32mINFO [0m LogCapture:health.py:113 [] | |||
Passed | suites/healthcheck/health_config_test.py::test_healthcheck_RI_plugin_missing_indexes | 0.79 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... [32mINFO [0m LogCapture:health.py:99 Checking config:hr_timestamp ... [32mINFO [0m LogCapture:health.py:99 Checking config:passwordscheme ... [32mINFO [0m LogCapture:health.py:99 Checking refint:attr_indexes ... [32mINFO [0m LogCapture:health.py:99 Checking refint:update_delay ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:mappingtree ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:search ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... [32mINFO [0m LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... [32mINFO [0m LogCapture:health.py:99 Checking logs:notes ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:119 1 Issue found! Generating report ... [32mINFO [0m LogCapture:health.py:45 [1] DS Lint Error: DSRILE0002 [32mINFO [0m LogCapture:health.py:46 -------------------------------------------------------------------------------- [32mINFO [0m LogCapture:health.py:47 Severity: HIGH [32mINFO [0m LogCapture:health.py:49 Check: refint:attr_indexes [32mINFO [0m LogCapture:health.py:50 Affects: [32mINFO [0m LogCapture:health.py:52 -- cn=referential integrity postoperation,cn=plugins,cn=config [32mINFO [0m LogCapture:health.py:52 -- dc=example,dc=com [32mINFO [0m LogCapture:health.py:52 -- member [32mINFO [0m LogCapture:health.py:53 Details: [32mINFO [0m LogCapture:health.py:54 ----------- [32mINFO [0m LogCapture:health.py:55 The referential integrity plugin is configured to use an attribute (member) that does not have an "equality" index in backend (dc=example,dc=com). 
Failure to have the proper indexing will lead to unindexed searches which cause high CPU and can significantly slow the server down. [32mINFO [0m LogCapture:health.py:56 Resolution: [32mINFO [0m LogCapture:health.py:57 ----------- [32mINFO [0m LogCapture:health.py:58 Check the attributes set in "referint-membership-attr" to make sure they have an index defined that has at least the equality "eq" index type. You will need to reindex the database after adding the missing index type. Here is an example using dsconf: # dsconf slapd-standalone1 backend index add --attr=member --reindex --index-type=eq dc=example,dc=com [32mINFO [0m LogCapture:health.py:124 ===== End Of Report (1 Issue found) ===== [32mINFO [0m LogCapture:health.py:126 [ { "dsle": "DSRILE0002", "severity": "HIGH", "description": "Referential integrity plugin configured with unindexed attribute.", "items": [ "cn=referential integrity postoperation,cn=plugins,cn=config", "dc=example,dc=com", "member" ], "detail": "The referential integrity plugin is configured to use an attribute (member)\nthat does not have an \"equality\" index in backend (dc=example,dc=com).\nFailure to have the proper indexing will lead to unindexed searches which\ncause high CPU and can significantly slow the server down.", "fix": "Check the attributes set in \"referint-membership-attr\" to make sure they have\nan index defined that has at least the equality \"eq\" index type. You will\nneed to reindex the database after adding the missing index type. Here is an\nexample using dsconf:\n\n # dsconf slapd-standalone1 backend index add --attr=member --reindex --index-type=eq dc=example,dc=com\n", "check": "refint:attr_indexes" } ] [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... [32mINFO [0m LogCapture:health.py:99 Checking config:hr_timestamp ... [32mINFO [0m LogCapture:health.py:99 Checking config:passwordscheme ... [32mINFO [0m LogCapture:health.py:99 Checking refint:attr_indexes ... 
[32mINFO [0m LogCapture:health.py:99 Checking refint:update_delay ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:mappingtree ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:search ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... [32mINFO [0m LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... [32mINFO [0m LogCapture:health.py:99 Checking logs:notes ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:111 No issues found. [32mINFO [0m LogCapture:health.py:113 [] | |||
Passed | suites/healthcheck/health_config_test.py::test_healthcheck_virtual_attr_incorrectly_indexed | 0.24 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... [32mINFO [0m LogCapture:health.py:99 Checking config:hr_timestamp ... [32mINFO [0m LogCapture:health.py:99 Checking config:passwordscheme ... [32mINFO [0m LogCapture:health.py:99 Checking refint:attr_indexes ... [32mINFO [0m LogCapture:health.py:99 Checking refint:update_delay ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:mappingtree ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:search ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... [32mINFO [0m LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... [32mINFO [0m LogCapture:health.py:99 Checking logs:notes ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:119 1 Issue found! Generating report ... [32mINFO [0m LogCapture:health.py:45 [1] DS Lint Error: DSVIRTLE0001 [32mINFO [0m LogCapture:health.py:46 -------------------------------------------------------------------------------- [32mINFO [0m LogCapture:health.py:47 Severity: HIGH [32mINFO [0m LogCapture:health.py:49 Check: backends:userroot:virt_attrs [32mINFO [0m LogCapture:health.py:50 Affects: [32mINFO [0m LogCapture:health.py:52 -- Virtual Attributes [32mINFO [0m LogCapture:health.py:52 -- dc=example,dc=com [32mINFO [0m LogCapture:health.py:52 -- Class Of Service (COS) [32mINFO [0m LogCapture:health.py:52 -- cosAttribute: postalcode [32mINFO [0m LogCapture:health.py:53 Details: [32mINFO [0m LogCapture:health.py:54 ----------- [32mINFO [0m LogCapture:health.py:55 You should not index virtual attributes, and as this will break searches that use the attribute in a filter. 
[32mINFO [0m LogCapture:health.py:56 Resolution: [32mINFO [0m LogCapture:health.py:57 ----------- [32mINFO [0m LogCapture:health.py:58 Remove the index for this attribute from the backend configuration. Here is an example using 'dsconf' to remove an index: # dsconf slapd-standalone1 backend index delete --attr postalcode dc=example,dc=com [32mINFO [0m LogCapture:health.py:124 ===== End Of Report (1 Issue found) ===== [32mINFO [0m LogCapture:health.py:126 [ { "dsle": "DSVIRTLE0001", "severity": "HIGH", "description": "Virtual attribute indexed.", "items": [ "Virtual Attributes", "dc=example,dc=com", "Class Of Service (COS)", "cosAttribute: postalcode" ], "detail": "You should not index virtual attributes, and as this will break searches that\nuse the attribute in a filter.", "fix": "Remove the index for this attribute from the backend configuration.\nHere is an example using 'dsconf' to remove an index:\n\n # dsconf slapd-standalone1 backend index delete --attr postalcode dc=example,dc=com", "check": "backends:userroot:virt_attrs" } ] | |||
Passed | suites/healthcheck/health_config_test.py::test_healthcheck_low_disk_space | 0.54 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... [32mINFO [0m LogCapture:health.py:99 Checking config:hr_timestamp ... [32mINFO [0m LogCapture:health.py:99 Checking config:passwordscheme ... [32mINFO [0m LogCapture:health.py:99 Checking refint:attr_indexes ... [32mINFO [0m LogCapture:health.py:99 Checking refint:update_delay ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:mappingtree ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:search ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... [32mINFO [0m LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... [32mINFO [0m LogCapture:health.py:99 Checking logs:notes ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:119 2 Issues found! Generating report ... [32mINFO [0m LogCapture:health.py:45 [1] DS Lint Error: DSVIRTLE0001 [32mINFO [0m LogCapture:health.py:46 -------------------------------------------------------------------------------- [32mINFO [0m LogCapture:health.py:47 Severity: HIGH [32mINFO [0m LogCapture:health.py:49 Check: backends:userroot:virt_attrs [32mINFO [0m LogCapture:health.py:50 Affects: [32mINFO [0m LogCapture:health.py:52 -- Virtual Attributes [32mINFO [0m LogCapture:health.py:52 -- dc=example,dc=com [32mINFO [0m LogCapture:health.py:52 -- Class Of Service (COS) [32mINFO [0m LogCapture:health.py:52 -- cosAttribute: postalcode [32mINFO [0m LogCapture:health.py:53 Details: [32mINFO [0m LogCapture:health.py:54 ----------- [32mINFO [0m LogCapture:health.py:55 You should not index virtual attributes, and as this will break searches that use the attribute in a filter. 
[32mINFO [0m LogCapture:health.py:56 Resolution: [32mINFO [0m LogCapture:health.py:57 ----------- [32mINFO [0m LogCapture:health.py:58 Remove the index for this attribute from the backend configuration. Here is an example using 'dsconf' to remove an index: # dsconf slapd-standalone1 backend index delete --attr postalcode dc=example,dc=com [32mINFO [0m LogCapture:health.py:45 [2] DS Lint Error: DSDSLE0001 [32mINFO [0m LogCapture:health.py:46 -------------------------------------------------------------------------------- [32mINFO [0m LogCapture:health.py:47 Severity: HIGH [32mINFO [0m LogCapture:health.py:49 Check: monitor-disk-space:disk_space [32mINFO [0m LogCapture:health.py:50 Affects: [32mINFO [0m LogCapture:health.py:52 -- Server [32mINFO [0m LogCapture:health.py:52 -- cn=config [32mINFO [0m LogCapture:health.py:53 Details: [32mINFO [0m LogCapture:health.py:54 ----------- [32mINFO [0m LogCapture:health.py:55 The disk partition used by the server (/), either for the database, the configuration files, or the logs is over 90% full. If the partition becomes completely filled serious problems can occur with the database or the server's stability. [32mINFO [0m LogCapture:health.py:56 Resolution: [32mINFO [0m LogCapture:health.py:57 ----------- [32mINFO [0m LogCapture:health.py:58 Attempt to free up disk space. Also try removing old rotated logs, or disable any verbose logging levels that might have been set. You might consider enabling the "Disk Monitoring" feature in cn=config to help prevent a disorderly shutdown of the server: nsslapd-disk-monitoring: on You can use 'dsconf' to set this value. Here is an example: # dsconf slapd-standalone1 config replace nsslapd-disk-monitoring=on You must restart the Directory Server for this change to take effect. 
Please see the Administration guide for more information: https://access.redhat.com/documentation/en-us/red_hat_directory_server/10/html/administration_guide/diskmonitoring [32mINFO [0m LogCapture:health.py:124 ===== End Of Report (2 Issues found) ===== [32mINFO [0m LogCapture:health.py:126 [ { "dsle": "DSVIRTLE0001", "severity": "HIGH", "description": "Virtual attribute indexed.", "items": [ "Virtual Attributes", "dc=example,dc=com", "Class Of Service (COS)", "cosAttribute: postalcode" ], "detail": "You should not index virtual attributes, and as this will break searches that\nuse the attribute in a filter.", "fix": "Remove the index for this attribute from the backend configuration.\nHere is an example using 'dsconf' to remove an index:\n\n # dsconf slapd-standalone1 backend index delete --attr postalcode dc=example,dc=com", "check": "backends:userroot:virt_attrs" }, { "dsle": "DSDSLE0001", "severity": "HIGH", "description": "Low disk space.", "items": [ "Server", "cn=config" ], "detail": "The disk partition used by the server (/), either for the database, the\nconfiguration files, or the logs is over 90% full. If the partition becomes\ncompletely filled serious problems can occur with the database or the server's\nstability.", "fix": "Attempt to free up disk space. Also try removing old rotated logs, or disable any\nverbose logging levels that might have been set. You might consider enabling\nthe \"Disk Monitoring\" feature in cn=config to help prevent a disorderly shutdown\nof the server:\n\n nsslapd-disk-monitoring: on\n\nYou can use 'dsconf' to set this value. Here is an example:\n\n # dsconf slapd-standalone1 config replace nsslapd-disk-monitoring=on\n\nYou must restart the Directory Server for this change to take effect.\n\nPlease see the Administration guide for more information:\n\n https://access.redhat.com/documentation/en-us/red_hat_directory_server/10/html/administration_guide/diskmonitoring\n", "check": "monitor-disk-space:disk_space" } ] | |||
Passed | suites/healthcheck/health_config_test.py::test_healthcheck_notes_unindexed_search | 13.04 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... [32mINFO [0m LogCapture:health.py:99 Checking config:hr_timestamp ... [32mINFO [0m LogCapture:health.py:99 Checking config:passwordscheme ... [32mINFO [0m LogCapture:health.py:99 Checking refint:attr_indexes ... [32mINFO [0m LogCapture:health.py:99 Checking refint:update_delay ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:mappingtree ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:search ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... [32mINFO [0m LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... [32mINFO [0m LogCapture:health.py:99 Checking logs:notes ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:119 1 Issue found! Generating report ... [32mINFO [0m LogCapture:health.py:45 [1] DS Lint Error: DSLOGNOTES0001 [32mINFO [0m LogCapture:health.py:46 -------------------------------------------------------------------------------- [32mINFO [0m LogCapture:health.py:47 Severity: Medium [32mINFO [0m LogCapture:health.py:49 Check: logs:notes [32mINFO [0m LogCapture:health.py:50 Affects: [32mINFO [0m LogCapture:health.py:52 -- Performance [32mINFO [0m LogCapture:health.py:52 -- /var/log/dirsrv/slapd-standalone1/access [32mINFO [0m LogCapture:health.py:53 Details: [32mINFO [0m LogCapture:health.py:54 ----------- [32mINFO [0m LogCapture:health.py:55 Found 1 fully unindexed searches in the current access log. Unindexed searches can cause high CPU and slow down the entire server's performance. [1] Unindexed Search - date: . 
- conn/op: 1/2 - base: dc=example,dc=com - scope: 2 - filter: (&(|(objectClass=nsAccount)(objectClass=nsPerson)(objectClass=simpleSecurityObject)(objectClass=organization)(objectClass=person)(objectClass=account)(objectClass=organizationalUnit)(objectClass=netscapeServer)(objectClass=domain)(objectClass=posixAccount)(objectClass=shadowAccount)(objectClass=posixGroup)(objectClass=mailRecipient))(uid=test*)) - etime: 0.478050142 [32mINFO [0m LogCapture:health.py:56 Resolution: [32mINFO [0m LogCapture:health.py:57 ----------- [32mINFO [0m LogCapture:health.py:58 Examine the searches that are unindexed, and either properly index the attributes in the filter, increase the nsslapd-idlistscanlimit, or stop using that filter. [32mINFO [0m LogCapture:health.py:124 ===== End Of Report (1 Issue found) ===== [32mINFO [0m LogCapture:health.py:126 [ { "dsle": "DSLOGNOTES0001", "severity": "Medium", "description": "Unindexed Search", "items": [ "Performance", "/var/log/dirsrv/slapd-standalone1/access" ], "detail": "Found 1 fully unindexed searches in the current access log.\nUnindexed searches can cause high CPU and slow down the entire server's performance.\n\n [1] Unindexed Search\n - date: .\n - conn/op: 1/2\n - base: dc=example,dc=com\n - scope: 2\n - filter: (&(|(objectClass=nsAccount)(objectClass=nsPerson)(objectClass=simpleSecurityObject)(objectClass=organization)(objectClass=person)(objectClass=account)(objectClass=organizationalUnit)(objectClass=netscapeServer)(objectClass=domain)(objectClass=posixAccount)(objectClass=shadowAccount)(objectClass=posixGroup)(objectClass=mailRecipient))(uid=test*))\n - etime: 0.478050142\n", "fix": "Examine the searches that are unindexed, and either properly index the attributes\nin the filter, increase the nsslapd-idlistscanlimit, or stop using that filter.", "check": "logs:notes" } ] | |||
Passed | suites/healthcheck/health_config_test.py::test_healthcheck_notes_unknown_attribute | 15.19 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... [32mINFO [0m LogCapture:health.py:99 Checking config:hr_timestamp ... [32mINFO [0m LogCapture:health.py:99 Checking config:passwordscheme ... [32mINFO [0m LogCapture:health.py:99 Checking refint:attr_indexes ... [32mINFO [0m LogCapture:health.py:99 Checking refint:update_delay ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:mappingtree ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:search ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... [32mINFO [0m LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... [32mINFO [0m LogCapture:health.py:99 Checking logs:notes ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:119 1 Issue found! Generating report ... [32mINFO [0m LogCapture:health.py:45 [1] DS Lint Error: DSLOGNOTES0002 [32mINFO [0m LogCapture:health.py:46 -------------------------------------------------------------------------------- [32mINFO [0m LogCapture:health.py:47 Severity: Medium [32mINFO [0m LogCapture:health.py:49 Check: logs:notes [32mINFO [0m LogCapture:health.py:50 Affects: [32mINFO [0m LogCapture:health.py:52 -- Possible Performance Impact [32mINFO [0m LogCapture:health.py:52 -- /var/log/dirsrv/slapd-standalone1/access [32mINFO [0m LogCapture:health.py:53 Details: [32mINFO [0m LogCapture:health.py:54 ----------- [32mINFO [0m LogCapture:health.py:55 Found 1 searches in the current access log that are using an unknown attribute in the search filter. [1] Invalid Attribute in Filter - date: . 
- conn/op: 1/2 - filter: (&(|(objectClass=nsAccount)(objectClass=nsPerson)(objectClass=simpleSecurityObject)(objectClass=organization)(objectClass=person)(objectClass=account)(objectClass=organizationalUnit)(objectClass=netscapeServer)(objectClass=domain)(objectClass=posixAccount)(objectClass=shadowAccount)(objectClass=posixGroup)(objectClass=mailRecipient))(unknown=test)) [32mINFO [0m LogCapture:health.py:56 Resolution: [32mINFO [0m LogCapture:health.py:57 ----------- [32mINFO [0m LogCapture:health.py:58 Stop using this these unknown attributes in the filter, or add the schema to the server and make sure it's properly indexed. [32mINFO [0m LogCapture:health.py:124 ===== End Of Report (1 Issue found) ===== [32mINFO [0m LogCapture:health.py:126 [ { "dsle": "DSLOGNOTES0002", "severity": "Medium", "description": "Unknown Attribute In Filter", "items": [ "Possible Performance Impact", "/var/log/dirsrv/slapd-standalone1/access" ], "detail": "Found 1 searches in the current access log that are using an\nunknown attribute in the search filter.\n\n [1] Invalid Attribute in Filter\n - date: .\n - conn/op: 1/2\n - filter: (&(|(objectClass=nsAccount)(objectClass=nsPerson)(objectClass=simpleSecurityObject)(objectClass=organization)(objectClass=person)(objectClass=account)(objectClass=organizationalUnit)(objectClass=netscapeServer)(objectClass=domain)(objectClass=posixAccount)(objectClass=shadowAccount)(objectClass=posixGroup)(objectClass=mailRecipient))(unknown=test))\n", "fix": "Stop using this these unknown attributes in the filter, or add the schema\nto the server and make sure it's properly indexed.", "check": "logs:notes" } ] | |||
Passed | suites/healthcheck/health_repl_test.py::test_healthcheck_replication_replica_not_reachable | 2.42 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 097a5117-c462-43e0-afc9-f1ea59db9a1a / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 
Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect c51ab1e8-fa65-434a-ae73-b2c81a49939f / got description=097a5117-c462-43e0-afc9-f1ea59db9a1a) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 69bb89cc-1ea2-4b70-9405-5e1489557408 / got description=c51ab1e8-fa65-434a-ae73-b2c81a49939f) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... 
[32mINFO [0m LogCapture:health.py:99 Checking replication:agmts_status ... [32mINFO [0m LogCapture:health.py:99 Checking replication:conflicts ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:119 1 Issue found! Generating report ... [32mINFO [0m LogCapture:health.py:45 [1] DS Lint Error: DSREPLLE0005 [32mINFO [0m LogCapture:health.py:46 -------------------------------------------------------------------------------- [32mINFO [0m LogCapture:health.py:47 Severity: MEDIUM [32mINFO [0m LogCapture:health.py:49 Check: replication:agmts_status [32mINFO [0m LogCapture:health.py:50 Affects: [32mINFO [0m LogCapture:health.py:52 -- Replication [32mINFO [0m LogCapture:health.py:52 -- Agreement [32mINFO [0m LogCapture:health.py:53 Details: [32mINFO [0m LogCapture:health.py:54 ----------- [32mINFO [0m LogCapture:health.py:55 The replication agreement (002) under "dc=example,dc=com" is not in synchronization, because the consumer server is not reachable. [32mINFO [0m LogCapture:health.py:56 Resolution: [32mINFO [0m LogCapture:health.py:57 ----------- [32mINFO [0m LogCapture:health.py:58 Check if the consumer is running, and also check the errors log for more information. 
[32mINFO [0m LogCapture:health.py:124 ===== End Of Report (1 Issue found) ===== [32mINFO [0m LogCapture:health.py:126 [ { "dsle": "DSREPLLE0005", "severity": "MEDIUM", "description": "Replication consumer not reachable.", "items": [ "Replication", "Agreement" ], "detail": "The replication agreement (002) under \"dc=example,dc=com\" is not in synchronization,\nbecause the consumer server is not reachable.", "fix": "Check if the consumer is running, and also check the errors log for more information.", "check": "replication:agmts_status" } ] [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 8c2574a8-a847-470f-ad6f-c75da143a65e / got description=69bb89cc-1ea2-4b70-9405-5e1489557408) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... [32mINFO [0m LogCapture:health.py:99 Checking replication:agmts_status ... [32mINFO [0m LogCapture:health.py:99 Checking replication:conflicts ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:111 No issues found. [32mINFO [0m LogCapture:health.py:113 [] | |||
Passed | suites/healthcheck/health_repl_test.py::test_healthcheck_changelog_trimming_not_configured | 3.46 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... [32mINFO [0m LogCapture:health.py:99 Checking replication:agmts_status ... [32mINFO [0m LogCapture:health.py:99 Checking replication:conflicts ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:119 1 Issue found! Generating report ... [32mINFO [0m LogCapture:health.py:45 [1] DS Lint Error: DSCLLE0001 [32mINFO [0m LogCapture:health.py:46 -------------------------------------------------------------------------------- [32mINFO [0m LogCapture:health.py:47 Severity: LOW [32mINFO [0m LogCapture:health.py:49 Check: backends:userroot::cl_trimming [32mINFO [0m LogCapture:health.py:50 Affects: [32mINFO [0m LogCapture:health.py:52 -- Replication [32mINFO [0m LogCapture:health.py:52 -- Changelog [32mINFO [0m LogCapture:health.py:52 -- Backends [32mINFO [0m LogCapture:health.py:53 Details: [32mINFO [0m LogCapture:health.py:54 ----------- [32mINFO [0m LogCapture:health.py:55 The replication changelog does have any kind of trimming configured. This will lead to the changelog size growing indefinitely. [32mINFO [0m LogCapture:health.py:56 Resolution: [32mINFO [0m LogCapture:health.py:57 ----------- [32mINFO [0m LogCapture:health.py:58 Configure changelog trimming, preferably by setting the maximum age of a changelog record. Here is an example: # dsconf slapd-master1 replication set-changelog --suffix YOUR_SUFFIX --max-age 30d [32mINFO [0m LogCapture:health.py:124 ===== End Of Report (1 Issue found) ===== [32mINFO [0m LogCapture:health.py:126 [ { "dsle": "DSCLLE0001", "severity": "LOW", "description": "Changelog trimming not configured.", "items": [ "Replication", "Changelog", "Backends" ], "detail": "The replication changelog does have any kind of trimming configured. 
This will\nlead to the changelog size growing indefinitely.", "fix": "Configure changelog trimming, preferably by setting the maximum age of a changelog\nrecord. Here is an example:\n\n # dsconf slapd-master1 replication set-changelog --suffix YOUR_SUFFIX --max-age 30d", "check": "backends:userroot::cl_trimming" } ] [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... [32mINFO [0m LogCapture:health.py:99 Checking replication:agmts_status ... [32mINFO [0m LogCapture:health.py:99 Checking replication:conflicts ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:111 No issues found. [32mINFO [0m LogCapture:health.py:113 [] | |||
Passed | suites/healthcheck/health_repl_test.py::test_healthcheck_replication_presence_of_conflict_entries | 3.59 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect f4f48257-c25f-46cb-bb61-327f87c141e5 / got description=8c2574a8-a847-470f-ad6f-c75da143a65e) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect b679273b-d6a7-414c-acc0-57aa11d0a7c7 / got description=f4f48257-c25f-46cb-bb61-327f87c141e5) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect ab56a42b-4cda-4740-a107-f95dcb81f56d / got description=b679273b-d6a7-414c-acc0-57aa11d0a7c7) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... [32mINFO [0m LogCapture:health.py:99 Checking replication:agmts_status ... [32mINFO [0m LogCapture:health.py:99 Checking replication:conflicts ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... 
[32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:119 1 Issue found! Generating report ... [32mINFO [0m LogCapture:health.py:45 [1] DS Lint Error: DSREPLLE0002 [32mINFO [0m LogCapture:health.py:46 -------------------------------------------------------------------------------- [32mINFO [0m LogCapture:health.py:47 Severity: LOW [32mINFO [0m LogCapture:health.py:49 Check: replication:conflicts [32mINFO [0m LogCapture:health.py:50 Affects: [32mINFO [0m LogCapture:health.py:52 -- Replication [32mINFO [0m LogCapture:health.py:52 -- Conflict Entries [32mINFO [0m LogCapture:health.py:53 Details: [32mINFO [0m LogCapture:health.py:54 ----------- [32mINFO [0m LogCapture:health.py:55 There were 1 conflict entries found under the replication suffix "dc=example,dc=com". [32mINFO [0m LogCapture:health.py:56 Resolution: [32mINFO [0m LogCapture:health.py:57 ----------- [32mINFO [0m LogCapture:health.py:58 While conflict entries are expected to occur in an MMR environment, they should be resolved. In regards to conflict entries there is always the original/counterpart entry that has a normal DN, and then the conflict version of that entry. Technically both entries are valid, you as the administrator, needs to decide which entry you want to keep. First examine/compare both entries to determine which one you want to keep or remove. You can use the CLI tool "dsconf" to resolve the conflict. 
Here is an example: List the conflict entries: # dsconf slapd-master1 repl-conflict list dc=example,dc=com Examine conflict entry and its counterpart entry: # dsconf slapd-master1 repl-conflict compare <DN of conflict entry> Remove conflict entry and keep only the original/counterpart entry: # dsconf slapd-master1 repl-conflict delete <DN of conflict entry> Replace the original/counterpart entry with the conflict entry: # dsconf slapd-master1 repl-conflict swap <DN of conflict entry> [32mINFO [0m LogCapture:health.py:124 ===== End Of Report (1 Issue found) ===== [32mINFO [0m LogCapture:health.py:126 [ { "dsle": "DSREPLLE0002", "severity": "LOW", "description": "Replication conflict entries found.", "items": [ "Replication", "Conflict Entries" ], "detail": "There were 1 conflict entries found under the replication suffix \"dc=example,dc=com\".", "fix": "While conflict entries are expected to occur in an MMR environment, they\nshould be resolved. In regards to conflict entries there is always the original/counterpart\nentry that has a normal DN, and then the conflict version of that entry. Technically both\nentries are valid, you as the administrator, needs to decide which entry you want to keep.\nFirst examine/compare both entries to determine which one you want to keep or remove. You\ncan use the CLI tool \"dsconf\" to resolve the conflict. Here is an example:\n\n List the conflict entries:\n\n # dsconf slapd-master1 repl-conflict list dc=example,dc=com\n\n Examine conflict entry and its counterpart entry:\n\n # dsconf slapd-master1 repl-conflict compare <DN of conflict entry>\n\n Remove conflict entry and keep only the original/counterpart entry:\n\n # dsconf slapd-master1 repl-conflict delete <DN of conflict entry>\n\n Replace the original/counterpart entry with the conflict entry:\n\n # dsconf slapd-master1 repl-conflict swap <DN of conflict entry>\n", "check": "replication:conflicts" } ] | |||
Passed | suites/healthcheck/health_repl_test.py::test_healthcheck_replication_out_of_sync_broken | 0.59 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master3 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39003, 'ldap-secureport': 63703, 'server-id': 'master3', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 7cfb6acc-c418-4a15-bd6a-88b108a32919 / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 027764c4-9250-46b2-8abe-ff111fa469e8 / got description=7cfb6acc-c418-4a15-bd6a-88b108a32919) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master3 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect e5edc11c-106a-43a7-80c6-7bec29f286da / got description=027764c4-9250-46b2-8abe-ff111fa469e8) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect f05fd1cd-d142-4931-ad63-ab15e09dad82 / got description=e5edc11c-106a-43a7-80c6-7bec29f286da) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... 
[32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master3 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master3 ... [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is was created [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master3 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master3 to master2 ... [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created -------------------------------Captured log call-------------------------------- [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... [32mINFO [0m LogCapture:health.py:99 Checking replication:agmts_status ... 
[32mINFO [0m LogCapture:health.py:99 Checking replication:conflicts ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:119 3 Issues found! Generating report ... [32mINFO [0m LogCapture:health.py:45 [1] DS Lint Error: DSREPLLE0001 [32mINFO [0m LogCapture:health.py:46 -------------------------------------------------------------------------------- [32mINFO [0m LogCapture:health.py:47 Severity: HIGH [32mINFO [0m LogCapture:health.py:49 Check: replication:agmts_status [32mINFO [0m LogCapture:health.py:50 Affects: [32mINFO [0m LogCapture:health.py:52 -- Replication [32mINFO [0m LogCapture:health.py:52 -- Agreement [32mINFO [0m LogCapture:health.py:53 Details: [32mINFO [0m LogCapture:health.py:54 ----------- [32mINFO [0m LogCapture:health.py:55 The replication agreement (002) under "dc=example,dc=com" is not in synchronization. [32mINFO [0m LogCapture:health.py:56 Resolution: [32mINFO [0m LogCapture:health.py:57 ----------- [32mINFO [0m LogCapture:health.py:58 You may need to reinitialize this replication agreement. Please check the errors log for more information. If you do need to reinitialize the agreement you can do so using dsconf. 
Here is an example: # dsconf slapd-master1 repl-agmt init "002" --suffix dc=example,dc=com [32mINFO [0m LogCapture:health.py:45 [2] DS Lint Error: DSREPLLE0001 [32mINFO [0m LogCapture:health.py:46 -------------------------------------------------------------------------------- [32mINFO [0m LogCapture:health.py:47 Severity: HIGH [32mINFO [0m LogCapture:health.py:49 Check: replication:agmts_status [32mINFO [0m LogCapture:health.py:50 Affects: [32mINFO [0m LogCapture:health.py:52 -- Replication [32mINFO [0m LogCapture:health.py:52 -- Agreement [32mINFO [0m LogCapture:health.py:53 Details: [32mINFO [0m LogCapture:health.py:54 ----------- [32mINFO [0m LogCapture:health.py:55 The replication agreement (003) under "dc=example,dc=com" is not in synchronization. [32mINFO [0m LogCapture:health.py:56 Resolution: [32mINFO [0m LogCapture:health.py:57 ----------- [32mINFO [0m LogCapture:health.py:58 You may need to reinitialize this replication agreement. Please check the errors log for more information. If you do need to reinitialize the agreement you can do so using dsconf. Here is an example: # dsconf slapd-master1 repl-agmt init "003" --suffix dc=example,dc=com [32mINFO [0m LogCapture:health.py:45 [3] DS Lint Error: DSCLLE0001 [32mINFO [0m LogCapture:health.py:46 -------------------------------------------------------------------------------- [32mINFO [0m LogCapture:health.py:47 Severity: LOW [32mINFO [0m LogCapture:health.py:49 Check: backends:userroot::cl_trimming [32mINFO [0m LogCapture:health.py:50 Affects: [32mINFO [0m LogCapture:health.py:52 -- Replication [32mINFO [0m LogCapture:health.py:52 -- Changelog [32mINFO [0m LogCapture:health.py:52 -- Backends [32mINFO [0m LogCapture:health.py:53 Details: [32mINFO [0m LogCapture:health.py:54 ----------- [32mINFO [0m LogCapture:health.py:55 The replication changelog does have any kind of trimming configured. This will lead to the changelog size growing indefinitely. 
[32mINFO [0m LogCapture:health.py:56 Resolution: [32mINFO [0m LogCapture:health.py:57 ----------- [32mINFO [0m LogCapture:health.py:58 Configure changelog trimming, preferably by setting the maximum age of a changelog record. Here is an example: # dsconf slapd-master1 replication set-changelog --suffix YOUR_SUFFIX --max-age 30d [32mINFO [0m LogCapture:health.py:124 ===== End Of Report (3 Issues found) ===== [32mINFO [0m LogCapture:health.py:126 [ { "dsle": "DSREPLLE0001", "severity": "HIGH", "description": "Replication agreement not set to be synchronized.", "items": [ "Replication", "Agreement" ], "detail": "The replication agreement (002) under \"dc=example,dc=com\" is not in synchronization.", "fix": "You may need to reinitialize this replication agreement. Please check the errors\nlog for more information. If you do need to reinitialize the agreement you can do so\nusing dsconf. Here is an example:\n\n # dsconf slapd-master1 repl-agmt init \"002\" --suffix dc=example,dc=com", "check": "replication:agmts_status" }, { "dsle": "DSREPLLE0001", "severity": "HIGH", "description": "Replication agreement not set to be synchronized.", "items": [ "Replication", "Agreement" ], "detail": "The replication agreement (003) under \"dc=example,dc=com\" is not in synchronization.", "fix": "You may need to reinitialize this replication agreement. Please check the errors\nlog for more information. If you do need to reinitialize the agreement you can do so\nusing dsconf. Here is an example:\n\n # dsconf slapd-master1 repl-agmt init \"003\" --suffix dc=example,dc=com", "check": "replication:agmts_status" }, { "dsle": "DSCLLE0001", "severity": "LOW", "description": "Changelog trimming not configured.", "items": [ "Replication", "Changelog", "Backends" ], "detail": "The replication changelog does have any kind of trimming configured. 
This will\nlead to the changelog size growing indefinitely.", "fix": "Configure changelog trimming, preferably by setting the maximum age of a changelog\nrecord. Here is an example:\n\n # dsconf slapd-master1 replication set-changelog --suffix YOUR_SUFFIX --max-age 30d", "check": "backends:userroot::cl_trimming" } ] | |||
Passed | suites/healthcheck/health_security_test.py::test_healthcheck_insecure_pwd_hash_configured | 0.91 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... [32mINFO [0m LogCapture:health.py:99 Checking config:hr_timestamp ... [32mINFO [0m LogCapture:health.py:99 Checking config:passwordscheme ... [32mINFO [0m LogCapture:health.py:99 Checking encryption:check_tls_version ... [32mINFO [0m LogCapture:health.py:99 Checking tls:certificate_expiration ... [32mINFO [0m LogCapture:health.py:99 Checking fschecks:file_perms ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:119 1 Issue found! Generating report ... [32mINFO [0m LogCapture:health.py:45 [1] DS Lint Error: DSCLE0002 [32mINFO [0m LogCapture:health.py:46 -------------------------------------------------------------------------------- [32mINFO [0m LogCapture:health.py:47 Severity: HIGH [32mINFO [0m LogCapture:health.py:49 Check: config:passwordscheme [32mINFO [0m LogCapture:health.py:50 Affects: [32mINFO [0m LogCapture:health.py:52 -- cn=config [32mINFO [0m LogCapture:health.py:53 Details: [32mINFO [0m LogCapture:health.py:54 ----------- [32mINFO [0m LogCapture:health.py:55 Password storage schemes in Directory Server define how passwords are hashed via a one-way mathematical function for storage. Knowing the hash it is difficult to gain the input, but knowing the input you can easily compare the hash. Many hashes are well known for cryptograhpic verification properties, but are designed to be *fast* to validate. 
This is the opposite of what we desire for password storage. In the unlikely event of a disclosure, you want hashes to be *difficult* to verify, as this adds a cost of work to an attacker. In Directory Server, we offer one hash suitable for this (PBKDF2_SHA256) and one hash for "legacy" support (SSHA512). Your configuration does not use these for password storage or the root password storage scheme. [32mINFO [0m LogCapture:health.py:56 Resolution: [32mINFO [0m LogCapture:health.py:57 ----------- [32mINFO [0m LogCapture:health.py:58 Perform a configuration reset of the values: passwordStorageScheme nsslapd-rootpwstoragescheme IE, stop Directory Server, and in dse.ldif delete these two lines. When Directory Server is started, they will use the server provided defaults that are secure. You can also use 'dsconf' to replace these values. Here is an example: # dsconf slapd-standalone1 config replace passwordStorageScheme=PBKDF2_SHA256 nsslapd-rootpwstoragescheme=PBKDF2_SHA256 [32mINFO [0m LogCapture:health.py:124 ===== End Of Report (1 Issue found) ===== [32mINFO [0m LogCapture:health.py:126 [ { "dsle": "DSCLE0002", "severity": "HIGH", "description": "Weak passwordStorageScheme.", "items": [ "cn=config" ], "detail": "Password storage schemes in Directory Server define how passwords are hashed via a\none-way mathematical function for storage. Knowing the hash it is difficult to gain\nthe input, but knowing the input you can easily compare the hash.\n\nMany hashes are well known for cryptograhpic verification properties, but are\ndesigned to be *fast* to validate. This is the opposite of what we desire for password\nstorage. 
In the unlikely event of a disclosure, you want hashes to be *difficult* to\nverify, as this adds a cost of work to an attacker.\n\nIn Directory Server, we offer one hash suitable for this (PBKDF2_SHA256) and one hash\nfor \"legacy\" support (SSHA512).\n\nYour configuration does not use these for password storage or the root password storage\nscheme.\n", "fix": "Perform a configuration reset of the values:\n\npasswordStorageScheme\nnsslapd-rootpwstoragescheme\n\nIE, stop Directory Server, and in dse.ldif delete these two lines. When Directory Server\nis started, they will use the server provided defaults that are secure.\n\nYou can also use 'dsconf' to replace these values. Here is an example:\n\n # dsconf slapd-standalone1 config replace passwordStorageScheme=PBKDF2_SHA256 nsslapd-rootpwstoragescheme=PBKDF2_SHA256", "check": "config:passwordscheme" } ] [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... [32mINFO [0m LogCapture:health.py:99 Checking config:hr_timestamp ... [32mINFO [0m LogCapture:health.py:99 Checking config:passwordscheme ... [32mINFO [0m LogCapture:health.py:99 Checking encryption:check_tls_version ... [32mINFO [0m LogCapture:health.py:99 Checking tls:certificate_expiration ... [32mINFO [0m LogCapture:health.py:99 Checking fschecks:file_perms ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:111 No issues found. [32mINFO [0m LogCapture:health.py:113 [] | |||
Passed | suites/healthcheck/health_security_test.py::test_healthcheck_min_allowed_tls_version_too_low | 28.26 | |
------------------------------Captured stdout call------------------------------ Setting system policy to LEGACY Note: System-wide crypto policies are applied on application start-up. It is recommended to restart the system for the change of policies to fully take place. -------------------------------Captured log call-------------------------------- [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... [32mINFO [0m LogCapture:health.py:99 Checking config:hr_timestamp ... [32mINFO [0m LogCapture:health.py:99 Checking config:passwordscheme ... [32mINFO [0m LogCapture:health.py:99 Checking encryption:check_tls_version ... [32mINFO [0m LogCapture:health.py:99 Checking tls:certificate_expiration ... [32mINFO [0m LogCapture:health.py:99 Checking fschecks:file_perms ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:119 1 Issue found! Generating report ... [32mINFO [0m LogCapture:health.py:45 [1] DS Lint Error: DSELE0001 [32mINFO [0m LogCapture:health.py:46 -------------------------------------------------------------------------------- [32mINFO [0m LogCapture:health.py:47 Severity: MEDIUM [32mINFO [0m LogCapture:health.py:49 Check: encryption:check_tls_version [32mINFO [0m LogCapture:health.py:50 Affects: [32mINFO [0m LogCapture:health.py:52 -- cn=encryption,cn=config [32mINFO [0m LogCapture:health.py:53 Details: [32mINFO [0m LogCapture:health.py:54 ----------- [32mINFO [0m LogCapture:health.py:55 This Directory Server may not be using strong TLS protocol versions. TLS1.0 is known to have a number of issues with the protocol. Please see: https://tools.ietf.org/html/rfc7457 It is advised you set this value to the maximum possible. [32mINFO [0m LogCapture:health.py:56 Resolution: [32mINFO [0m LogCapture:health.py:57 ----------- [32mINFO [0m LogCapture:health.py:58 There are two options for setting the TLS minimum version allowed. 
You, can set "sslVersionMin" in "cn=encryption,cn=config" to a version greater than "TLS1.0" You can also use 'dsconf' to set this value. Here is an example: # dsconf slapd-standalone1 security set --tls-protocol-min=TLS1.2 You must restart the Directory Server for this change to take effect. Or, you can set the system wide crypto policy to FUTURE which will use a higher TLS minimum version, but doing this affects the entire system: # update-crypto-policies --set FUTURE [32mINFO [0m LogCapture:health.py:124 ===== End Of Report (1 Issue found) ===== [32mINFO [0m LogCapture:health.py:126 [ { "dsle": "DSELE0001", "severity": "MEDIUM", "description": "Weak TLS protocol version.", "items": [ "cn=encryption,cn=config" ], "detail": "This Directory Server may not be using strong TLS protocol versions. TLS1.0 is known to\nhave a number of issues with the protocol. Please see:\n\nhttps://tools.ietf.org/html/rfc7457\n\nIt is advised you set this value to the maximum possible.", "fix": "There are two options for setting the TLS minimum version allowed. You,\ncan set \"sslVersionMin\" in \"cn=encryption,cn=config\" to a version greater than \"TLS1.0\"\nYou can also use 'dsconf' to set this value. Here is an example:\n\n # dsconf slapd-standalone1 security set --tls-protocol-min=TLS1.2\n\nYou must restart the Directory Server for this change to take effect.\n\nOr, you can set the system wide crypto policy to FUTURE which will use a higher TLS\nminimum version, but doing this affects the entire system:\n\n # update-crypto-policies --set FUTURE", "check": "encryption:check_tls_version" } ] [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... [32mINFO [0m LogCapture:health.py:99 Checking config:hr_timestamp ... [32mINFO [0m LogCapture:health.py:99 Checking config:passwordscheme ... [32mINFO [0m LogCapture:health.py:99 Checking encryption:check_tls_version ... [32mINFO [0m LogCapture:health.py:99 Checking tls:certificate_expiration ... 
[32mINFO [0m LogCapture:health.py:99 Checking fschecks:file_perms ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:111 No issues found. [32mINFO [0m LogCapture:health.py:113 [] | |||
Passed | suites/healthcheck/health_security_test.py::test_healthcheck_resolvconf_bad_file_perm | 1.52 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... [32mINFO [0m LogCapture:health.py:99 Checking config:hr_timestamp ... [32mINFO [0m LogCapture:health.py:99 Checking config:passwordscheme ... [32mINFO [0m LogCapture:health.py:99 Checking encryption:check_tls_version ... [32mINFO [0m LogCapture:health.py:99 Checking tls:certificate_expiration ... [32mINFO [0m LogCapture:health.py:99 Checking fschecks:file_perms ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:119 1 Issue found! Generating report ... [32mINFO [0m LogCapture:health.py:45 [1] DS Lint Error: DSPERMLE0001 [32mINFO [0m LogCapture:health.py:46 -------------------------------------------------------------------------------- [32mINFO [0m LogCapture:health.py:47 Severity: MEDIUM [32mINFO [0m LogCapture:health.py:49 Check: fschecks:file_perms [32mINFO [0m LogCapture:health.py:50 Affects: [32mINFO [0m LogCapture:health.py:52 -- File Permissions [32mINFO [0m LogCapture:health.py:52 -- /etc/resolv.conf [32mINFO [0m LogCapture:health.py:53 Details: [32mINFO [0m LogCapture:health.py:54 ----------- [32mINFO [0m LogCapture:health.py:55 The file "/etc/resolv.conf" does not have the expected permissions (644). This can cause issues with replication and chaining. [32mINFO [0m LogCapture:health.py:56 Resolution: [32mINFO [0m LogCapture:health.py:57 ----------- [32mINFO [0m LogCapture:health.py:58 Change the file permissions: # chmod 644 /etc/resolv.conf [32mINFO [0m LogCapture:health.py:124 ===== End Of Report (1 Issue found) ===== [32mINFO [0m LogCapture:health.py:126 [ { "dsle": "DSPERMLE0001", "severity": "MEDIUM", "description": "Incorrect file permissions.", "items": [ "File Permissions", "/etc/resolv.conf" ], "detail": "The file \"/etc/resolv.conf\" does not have the expected permissions (644). 
This\ncan cause issues with replication and chaining.", "fix": "Change the file permissions:\n\n # chmod 644 /etc/resolv.conf", "check": "fschecks:file_perms" } ] [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... [32mINFO [0m LogCapture:health.py:99 Checking config:hr_timestamp ... [32mINFO [0m LogCapture:health.py:99 Checking config:passwordscheme ... [32mINFO [0m LogCapture:health.py:99 Checking encryption:check_tls_version ... [32mINFO [0m LogCapture:health.py:99 Checking tls:certificate_expiration ... [32mINFO [0m LogCapture:health.py:99 Checking fschecks:file_perms ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:111 No issues found. [32mINFO [0m LogCapture:health.py:113 [] | |||
Passed | suites/healthcheck/health_security_test.py::test_healthcheck_pwdfile_bad_file_perm | 1.84 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... [32mINFO [0m LogCapture:health.py:99 Checking config:hr_timestamp ... [32mINFO [0m LogCapture:health.py:99 Checking config:passwordscheme ... [32mINFO [0m LogCapture:health.py:99 Checking encryption:check_tls_version ... [32mINFO [0m LogCapture:health.py:99 Checking tls:certificate_expiration ... [32mINFO [0m LogCapture:health.py:99 Checking fschecks:file_perms ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:119 1 Issue found! Generating report ... [32mINFO [0m LogCapture:health.py:45 [1] DS Lint Error: DSPERMLE0002 [32mINFO [0m LogCapture:health.py:46 -------------------------------------------------------------------------------- [32mINFO [0m LogCapture:health.py:47 Severity: HIGH [32mINFO [0m LogCapture:health.py:49 Check: fschecks:file_perms [32mINFO [0m LogCapture:health.py:50 Affects: [32mINFO [0m LogCapture:health.py:52 -- File Permissions [32mINFO [0m LogCapture:health.py:52 -- /etc/dirsrv/slapd-standalone1/pwdfile.txt [32mINFO [0m LogCapture:health.py:53 Details: [32mINFO [0m LogCapture:health.py:54 ----------- [32mINFO [0m LogCapture:health.py:55 The file "/etc/dirsrv/slapd-standalone1/pwdfile.txt" does not have the expected permissions (400). The security database pin/password files should only be readable by Directory Server user. 
[32mINFO [0m LogCapture:health.py:56 Resolution: [32mINFO [0m LogCapture:health.py:57 ----------- [32mINFO [0m LogCapture:health.py:58 Change the file permissions: # chmod 400 /etc/dirsrv/slapd-standalone1/pwdfile.txt [32mINFO [0m LogCapture:health.py:124 ===== End Of Report (1 Issue found) ===== [32mINFO [0m LogCapture:health.py:126 [ { "dsle": "DSPERMLE0002", "severity": "HIGH", "description": "Incorrect security database file permissions.", "items": [ "File Permissions", "/etc/dirsrv/slapd-standalone1/pwdfile.txt" ], "detail": "The file \"/etc/dirsrv/slapd-standalone1/pwdfile.txt\" does not have the expected permissions (400). The\nsecurity database pin/password files should only be readable by Directory Server user.", "fix": "Change the file permissions:\n\n # chmod 400 /etc/dirsrv/slapd-standalone1/pwdfile.txt", "check": "fschecks:file_perms" } ] [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... [32mINFO [0m LogCapture:health.py:99 Checking config:hr_timestamp ... [32mINFO [0m LogCapture:health.py:99 Checking config:passwordscheme ... [32mINFO [0m LogCapture:health.py:99 Checking encryption:check_tls_version ... [32mINFO [0m LogCapture:health.py:99 Checking tls:certificate_expiration ... [32mINFO [0m LogCapture:health.py:99 Checking fschecks:file_perms ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:111 No issues found. [32mINFO [0m LogCapture:health.py:113 [] | |||
Passed | suites/healthcheck/health_sync_test.py::test_healthcheck_replication_out_of_sync_not_broken | 50.93 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master3 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39003, 'ldap-secureport': 63703, 'server-id': 'master3', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 6bc29e73-aec5-47ef-8e03-f36f6180477b / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 820c81b5-d65f-46b8-b145-bdd272ba626e / got description=6bc29e73-aec5-47ef-8e03-f36f6180477b) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master3 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 8e885e79-ea8d-4bd9-8ae0-bd5510b2d1eb / got description=820c81b5-d65f-46b8-b145-bdd272ba626e) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 6b9e565e-e183-480a-8632-5b61e4c0afca / got description=8e885e79-ea8d-4bd9-8ae0-bd5510b2d1eb) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... 
[32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master3 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master3 ... [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is was created [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master3 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master3 to master2 ... [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created -------------------------------Captured log call-------------------------------- [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... [32mINFO [0m LogCapture:health.py:99 Checking replication:agmts_status ... 
[32mINFO [0m LogCapture:health.py:99 Checking replication:conflicts ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:119 1 Issue found! Generating report ... [32mINFO [0m LogCapture:health.py:45 [1] DS Lint Error: DSREPLLE0003 [32mINFO [0m LogCapture:health.py:46 -------------------------------------------------------------------------------- [32mINFO [0m LogCapture:health.py:47 Severity: MEDIUM [32mINFO [0m LogCapture:health.py:49 Check: replication:agmts_status [32mINFO [0m LogCapture:health.py:50 Affects: [32mINFO [0m LogCapture:health.py:52 -- Replication [32mINFO [0m LogCapture:health.py:52 -- Agreement [32mINFO [0m LogCapture:health.py:53 Details: [32mINFO [0m LogCapture:health.py:54 ----------- [32mINFO [0m LogCapture:health.py:55 The replication agreement (001) under "dc=example,dc=com" is not in synchronization. Status message: error (1) can't acquire busy replica (unable to acquire replica: the replica is currently being updated by another supplier.) [32mINFO [0m LogCapture:health.py:56 Resolution: [32mINFO [0m LogCapture:health.py:57 ----------- [32mINFO [0m LogCapture:health.py:58 Replication is not in synchronization but it may recover. Continue to monitor this agreement. [32mINFO [0m LogCapture:health.py:124 ===== End Of Report (1 Issue found) ===== [32mINFO [0m LogCapture:health.py:126 [ { "dsle": "DSREPLLE0003", "severity": "MEDIUM", "description": "Unsynchronized replication agreement.", "items": [ "Replication", "Agreement" ], "detail": "The replication agreement (001) under \"dc=example,dc=com\" is not in synchronization.\nStatus message: error (1) can't acquire busy replica (unable to acquire replica: the replica is currently being updated by another supplier.)", "fix": "Replication is not in synchronization but it may recover. Continue to\nmonitor this agreement.", "check": "replication:agmts_status" } ] | |||
Passed | suites/healthcheck/healthcheck_test.py::test_healthcheck_disabled_suffix | 1.03 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... [32mINFO [0m LogCapture:health.py:99 Checking config:hr_timestamp ... [32mINFO [0m LogCapture:health.py:99 Checking config:passwordscheme ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:mappingtree ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:search ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... [32mINFO [0m LogCapture:health.py:99 Checking encryption:check_tls_version ... [32mINFO [0m LogCapture:health.py:99 Checking fschecks:file_perms ... [32mINFO [0m LogCapture:health.py:99 Checking refint:attr_indexes ... [32mINFO [0m LogCapture:health.py:99 Checking refint:update_delay ... [32mINFO [0m LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... [32mINFO [0m LogCapture:health.py:99 Checking replication:agmts_status ... [32mINFO [0m LogCapture:health.py:99 Checking replication:conflicts ... [32mINFO [0m LogCapture:health.py:99 Checking dseldif:nsstate ... [32mINFO [0m LogCapture:health.py:99 Checking tls:certificate_expiration ... [32mINFO [0m LogCapture:health.py:99 Checking logs:notes ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:119 2 Issues found! Generating report ... 
[32mINFO [0m LogCapture:health.py:45 [1] DS Lint Error: DSBLE0001 [32mINFO [0m LogCapture:health.py:46 -------------------------------------------------------------------------------- [32mINFO [0m LogCapture:health.py:47 Severity: MEDIUM [32mINFO [0m LogCapture:health.py:49 Check: backends:userroot:mappingtree [32mINFO [0m LogCapture:health.py:50 Affects: [32mINFO [0m LogCapture:health.py:52 -- userroot [32mINFO [0m LogCapture:health.py:53 Details: [32mINFO [0m LogCapture:health.py:54 ----------- [32mINFO [0m LogCapture:health.py:55 This backend may be missing the correct mapping tree references. Mapping Trees allow the directory server to determine which backend an operation is routed to in the abscence of other information. This is extremely important for correct functioning of LDAP ADD for example. A correct Mapping tree for this backend must contain the suffix name, the database name and be a backend type. IE: cn=o3Dexample,cn=mapping tree,cn=config cn: o=example nsslapd-backend: userRoot nsslapd-state: backend objectClass: top objectClass: extensibleObject objectClass: nsMappingTree [32mINFO [0m LogCapture:health.py:56 Resolution: [32mINFO [0m LogCapture:health.py:57 ----------- [32mINFO [0m LogCapture:health.py:58 Either you need to create the mapping tree, or you need to repair the related mapping tree. You will need to do this by hand by editing cn=config, or stopping the instance and editing dse.ldif. 
[32mINFO [0m LogCapture:health.py:45 [2] DS Lint Error: DSBLE0002 [32mINFO [0m LogCapture:health.py:46 -------------------------------------------------------------------------------- [32mINFO [0m LogCapture:health.py:47 Severity: HIGH [32mINFO [0m LogCapture:health.py:49 Check: backends:userroot:search [32mINFO [0m LogCapture:health.py:50 Affects: [32mINFO [0m LogCapture:health.py:52 -- dc=example,dc=com [32mINFO [0m LogCapture:health.py:53 Details: [32mINFO [0m LogCapture:health.py:54 ----------- [32mINFO [0m LogCapture:health.py:55 Unable to query the backend. LDAP error ({'msgtype': 101, 'msgid': 26, 'result': 1, 'desc': 'Operations error', 'ctrls': [], 'info': 'Warning: Operation attempted on a disabled node : dc=example,dc=com\n'}) [32mINFO [0m LogCapture:health.py:56 Resolution: [32mINFO [0m LogCapture:health.py:57 ----------- [32mINFO [0m LogCapture:health.py:58 Check the server's error and access logs for more information. [32mINFO [0m LogCapture:health.py:124 ===== End Of Report (2 Issues found) ===== [32mINFO [0m LogCapture:health.py:126 [ { "dsle": "DSBLE0001", "severity": "MEDIUM", "description": "Possibly incorrect mapping tree.", "items": [ "userroot" ], "detail": "This backend may be missing the correct mapping tree references. Mapping Trees allow\nthe directory server to determine which backend an operation is routed to in the\nabscence of other information. This is extremely important for correct functioning\nof LDAP ADD for example.\n\nA correct Mapping tree for this backend must contain the suffix name, the database name\nand be a backend type. IE:\n\ncn=o3Dexample,cn=mapping tree,cn=config\ncn: o=example\nnsslapd-backend: userRoot\nnsslapd-state: backend\nobjectClass: top\nobjectClass: extensibleObject\nobjectClass: nsMappingTree\n\n", "fix": "Either you need to create the mapping tree, or you need to repair the related\nmapping tree. 
You will need to do this by hand by editing cn=config, or stopping\nthe instance and editing dse.ldif.\n", "check": "backends:userroot:mappingtree" }, { "dsle": "DSBLE0002", "severity": "HIGH", "description": "Unable to query backend.", "items": [ "dc=example,dc=com" ], "detail": "Unable to query the backend. LDAP error ({'msgtype': 101, 'msgid': 26, 'result': 1, 'desc': 'Operations error', 'ctrls': [], 'info': 'Warning: Operation attempted on a disabled node : dc=example,dc=com\\n'})", "fix": "Check the server's error and access logs for more information.", "check": "backends:userroot:search" } ] | |||
Passed | suites/healthcheck/healthcheck_test.py::test_healthcheck_standalone | 0.31 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... [32mINFO [0m LogCapture:health.py:99 Checking config:hr_timestamp ... [32mINFO [0m LogCapture:health.py:99 Checking config:passwordscheme ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:mappingtree ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:search ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... [32mINFO [0m LogCapture:health.py:99 Checking encryption:check_tls_version ... [32mINFO [0m LogCapture:health.py:99 Checking fschecks:file_perms ... [32mINFO [0m LogCapture:health.py:99 Checking refint:attr_indexes ... [32mINFO [0m LogCapture:health.py:99 Checking refint:update_delay ... [32mINFO [0m LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... [32mINFO [0m LogCapture:health.py:99 Checking replication:agmts_status ... [32mINFO [0m LogCapture:health.py:99 Checking replication:conflicts ... [32mINFO [0m LogCapture:health.py:99 Checking dseldif:nsstate ... [32mINFO [0m LogCapture:health.py:99 Checking tls:certificate_expiration ... [32mINFO [0m LogCapture:health.py:99 Checking logs:notes ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:111 No issues found. [32mINFO [0m LogCapture:health.py:113 [] | |||
Passed | suites/healthcheck/healthcheck_test.py::test_healthcheck_list_checks | 0.30 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m LogCapture:health.py:90 config:hr_timestamp [32mINFO [0m LogCapture:health.py:90 config:passwordscheme [32mINFO [0m LogCapture:health.py:90 backends:userroot:cl_trimming [32mINFO [0m LogCapture:health.py:90 backends:userroot:mappingtree [32mINFO [0m LogCapture:health.py:90 backends:userroot:search [32mINFO [0m LogCapture:health.py:90 backends:userroot:virt_attrs [32mINFO [0m LogCapture:health.py:90 encryption:check_tls_version [32mINFO [0m LogCapture:health.py:90 fschecks:file_perms [32mINFO [0m LogCapture:health.py:90 refint:attr_indexes [32mINFO [0m LogCapture:health.py:90 refint:update_delay [32mINFO [0m LogCapture:health.py:90 monitor-disk-space:disk_space [32mINFO [0m LogCapture:health.py:90 replication:agmts_status [32mINFO [0m LogCapture:health.py:90 replication:conflicts [32mINFO [0m LogCapture:health.py:90 dseldif:nsstate [32mINFO [0m LogCapture:health.py:90 tls:certificate_expiration [32mINFO [0m LogCapture:health.py:90 logs:notes | |||
Passed | suites/healthcheck/healthcheck_test.py::test_healthcheck_list_errors | 0.01 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m LogCapture:health.py:71 DSBLE0001 :: Possibly incorrect mapping tree. [32mINFO [0m LogCapture:health.py:71 DSBLE0002 :: Unable to query backend. [32mINFO [0m LogCapture:health.py:71 DSBLE0003 :: Uninitialized backend database. [32mINFO [0m LogCapture:health.py:71 DSCERTLE0001 :: Certificate about to expire. [32mINFO [0m LogCapture:health.py:71 DSCERTLE0002 :: Certificate expired. [32mINFO [0m LogCapture:health.py:71 DSCLE0001 :: Different log timestamp format. [32mINFO [0m LogCapture:health.py:71 DSCLE0002 :: Weak passwordStorageScheme. [32mINFO [0m LogCapture:health.py:71 DSCLLE0001 :: Changelog trimming not configured. [32mINFO [0m LogCapture:health.py:71 DSDSLE0001 :: Low disk space. [32mINFO [0m LogCapture:health.py:71 DSELE0001 :: Weak TLS protocol version. [32mINFO [0m LogCapture:health.py:71 DSLOGNOTES0001 :: Unindexed Search [32mINFO [0m LogCapture:health.py:71 DSLOGNOTES0002 :: Unknown Attribute In Filter [32mINFO [0m LogCapture:health.py:71 DSPERMLE0001 :: Incorrect file permissions. [32mINFO [0m LogCapture:health.py:71 DSPERMLE0002 :: Incorrect security database file permissions. [32mINFO [0m LogCapture:health.py:71 DSREPLLE0001 :: Replication agreement not set to be synchronized. [32mINFO [0m LogCapture:health.py:71 DSREPLLE0002 :: Replication conflict entries found. [32mINFO [0m LogCapture:health.py:71 DSREPLLE0003 :: Unsynchronized replication agreement. [32mINFO [0m LogCapture:health.py:71 DSREPLLE0004 :: Unable to get replication agreement status. [32mINFO [0m LogCapture:health.py:71 DSREPLLE0005 :: Replication consumer not reachable. [32mINFO [0m LogCapture:health.py:71 DSRILE0001 :: Referential integrity plugin may be slower. [32mINFO [0m LogCapture:health.py:71 DSRILE0002 :: Referential integrity plugin configured with unindexed attribute. [32mINFO [0m LogCapture:health.py:71 DSSKEWLE0001 :: Medium time skew. 
[32mINFO [0m LogCapture:health.py:71 DSSKEWLE0002 :: Major time skew. [32mINFO [0m LogCapture:health.py:71 DSSKEWLE0003 :: Extensive time skew. [32mINFO [0m LogCapture:health.py:71 DSVIRTLE0001 :: Virtual attribute indexed. | |||
Passed | suites/healthcheck/healthcheck_test.py::test_healthcheck_check_option | 2.28 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... [32mINFO [0m LogCapture:health.py:99 Checking config:hr_timestamp ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:111 No issues found. [32mINFO [0m LogCapture:health.py:113 [] [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... [32mINFO [0m LogCapture:health.py:99 Checking config:passwordscheme ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:111 No issues found. [32mINFO [0m LogCapture:health.py:113 [] [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:111 No issues found. [32mINFO [0m LogCapture:health.py:113 [] [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:mappingtree ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:111 No issues found. [32mINFO [0m LogCapture:health.py:113 [] [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:search ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:111 No issues found. [32mINFO [0m LogCapture:health.py:113 [] [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:111 No issues found. 
[32mINFO [0m LogCapture:health.py:113 [] [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... [32mINFO [0m LogCapture:health.py:99 Checking encryption:check_tls_version ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:111 No issues found. [32mINFO [0m LogCapture:health.py:113 [] [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... [32mINFO [0m LogCapture:health.py:99 Checking fschecks:file_perms ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:111 No issues found. [32mINFO [0m LogCapture:health.py:113 [] [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... [32mINFO [0m LogCapture:health.py:99 Checking refint:attr_indexes ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:111 No issues found. [32mINFO [0m LogCapture:health.py:113 [] [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... [32mINFO [0m LogCapture:health.py:99 Checking refint:update_delay ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:111 No issues found. [32mINFO [0m LogCapture:health.py:113 [] [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... [32mINFO [0m LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:111 No issues found. [32mINFO [0m LogCapture:health.py:113 [] [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... [32mINFO [0m LogCapture:health.py:99 Checking replication:agmts_status ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:111 No issues found. 
[32mINFO [0m LogCapture:health.py:113 [] [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... [32mINFO [0m LogCapture:health.py:99 Checking replication:conflicts ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:111 No issues found. [32mINFO [0m LogCapture:health.py:113 [] [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... [32mINFO [0m LogCapture:health.py:99 Checking dseldif:nsstate ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:111 No issues found. [32mINFO [0m LogCapture:health.py:113 [] [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... [32mINFO [0m LogCapture:health.py:99 Checking tls:certificate_expiration ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:111 No issues found. [32mINFO [0m LogCapture:health.py:113 [] [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... [32mINFO [0m LogCapture:health.py:99 Checking logs:notes ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:111 No issues found. [32mINFO [0m LogCapture:health.py:113 [] | |||
Passed | suites/healthcheck/healthcheck_test.py::test_healthcheck_standalone_tls | 12.91 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... [32mINFO [0m LogCapture:health.py:99 Checking config:hr_timestamp ... [32mINFO [0m LogCapture:health.py:99 Checking config:passwordscheme ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:mappingtree ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:search ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... [32mINFO [0m LogCapture:health.py:99 Checking encryption:check_tls_version ... [32mINFO [0m LogCapture:health.py:99 Checking fschecks:file_perms ... [32mINFO [0m LogCapture:health.py:99 Checking refint:attr_indexes ... [32mINFO [0m LogCapture:health.py:99 Checking refint:update_delay ... [32mINFO [0m LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... [32mINFO [0m LogCapture:health.py:99 Checking replication:agmts_status ... [32mINFO [0m LogCapture:health.py:99 Checking replication:conflicts ... [32mINFO [0m LogCapture:health.py:99 Checking dseldif:nsstate ... [32mINFO [0m LogCapture:health.py:99 Checking tls:certificate_expiration ... [32mINFO [0m LogCapture:health.py:99 Checking logs:notes ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:111 No issues found. [32mINFO [0m LogCapture:health.py:113 [] | |||
Passed | suites/healthcheck/healthcheck_test.py::test_healthcheck_replication | 0.92 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 10c3a3b5-a353-45b0-a2a5-16ac6398ea38 / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 
Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 9c6eabee-b738-44cf-9ee2-2152c619f93d / got description=10c3a3b5-a353-45b0-a2a5-16ac6398ea38) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists -------------------------------Captured log call-------------------------------- [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... [32mINFO [0m LogCapture:health.py:99 Checking config:hr_timestamp ... [32mINFO [0m LogCapture:health.py:99 Checking config:passwordscheme ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:mappingtree ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:search ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... [32mINFO [0m LogCapture:health.py:99 Checking encryption:check_tls_version ... 
[32mINFO [0m LogCapture:health.py:99 Checking fschecks:file_perms ... [32mINFO [0m LogCapture:health.py:99 Checking refint:attr_indexes ... [32mINFO [0m LogCapture:health.py:99 Checking refint:update_delay ... [32mINFO [0m LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... [32mINFO [0m LogCapture:health.py:99 Checking replication:agmts_status ... [32mINFO [0m LogCapture:health.py:99 Checking replication:conflicts ... [32mINFO [0m LogCapture:health.py:99 Checking dseldif:nsstate ... [32mINFO [0m LogCapture:health.py:99 Checking tls:certificate_expiration ... [32mINFO [0m LogCapture:health.py:99 Checking logs:notes ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:111 No issues found. [32mINFO [0m LogCapture:health.py:113 [] [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... [32mINFO [0m LogCapture:health.py:99 Checking config:hr_timestamp ... [32mINFO [0m LogCapture:health.py:99 Checking config:passwordscheme ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:mappingtree ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:search ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... [32mINFO [0m LogCapture:health.py:99 Checking encryption:check_tls_version ... [32mINFO [0m LogCapture:health.py:99 Checking fschecks:file_perms ... [32mINFO [0m LogCapture:health.py:99 Checking refint:attr_indexes ... [32mINFO [0m LogCapture:health.py:99 Checking refint:update_delay ... [32mINFO [0m LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... [32mINFO [0m LogCapture:health.py:99 Checking replication:agmts_status ... [32mINFO [0m LogCapture:health.py:99 Checking replication:conflicts ... [32mINFO [0m LogCapture:health.py:99 Checking dseldif:nsstate ... [32mINFO [0m LogCapture:health.py:99 Checking tls:certificate_expiration ... 
[32mINFO [0m LogCapture:health.py:99 Checking logs:notes ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:111 No issues found. [32mINFO [0m LogCapture:health.py:113 [] | |||
Passed | suites/healthcheck/healthcheck_test.py::test_healthcheck_replication_tls | 26.27 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... [32mINFO [0m LogCapture:health.py:99 Checking config:hr_timestamp ... [32mINFO [0m LogCapture:health.py:99 Checking config:passwordscheme ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:mappingtree ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:search ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... [32mINFO [0m LogCapture:health.py:99 Checking encryption:check_tls_version ... [32mINFO [0m LogCapture:health.py:99 Checking fschecks:file_perms ... [32mINFO [0m LogCapture:health.py:99 Checking refint:attr_indexes ... [32mINFO [0m LogCapture:health.py:99 Checking refint:update_delay ... [32mINFO [0m LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... [32mINFO [0m LogCapture:health.py:99 Checking replication:agmts_status ... [32mINFO [0m LogCapture:health.py:99 Checking replication:conflicts ... [32mINFO [0m LogCapture:health.py:99 Checking dseldif:nsstate ... [32mINFO [0m LogCapture:health.py:99 Checking tls:certificate_expiration ... [32mINFO [0m LogCapture:health.py:99 Checking logs:notes ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:111 No issues found. [32mINFO [0m LogCapture:health.py:113 [] [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... [32mINFO [0m LogCapture:health.py:99 Checking config:hr_timestamp ... [32mINFO [0m LogCapture:health.py:99 Checking config:passwordscheme ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:mappingtree ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:search ... 
[32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... [32mINFO [0m LogCapture:health.py:99 Checking encryption:check_tls_version ... [32mINFO [0m LogCapture:health.py:99 Checking fschecks:file_perms ... [32mINFO [0m LogCapture:health.py:99 Checking refint:attr_indexes ... [32mINFO [0m LogCapture:health.py:99 Checking refint:update_delay ... [32mINFO [0m LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... [32mINFO [0m LogCapture:health.py:99 Checking replication:agmts_status ... [32mINFO [0m LogCapture:health.py:99 Checking replication:conflicts ... [32mINFO [0m LogCapture:health.py:99 Checking dseldif:nsstate ... [32mINFO [0m LogCapture:health.py:99 Checking tls:certificate_expiration ... [32mINFO [0m LogCapture:health.py:99 Checking logs:notes ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:111 No issues found. [32mINFO [0m LogCapture:health.py:113 [] | |||
Passed | suites/healthcheck/healthcheck_test.py::test_healthcheck_backend_missing_mapping_tree | 1.70 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... [32mINFO [0m LogCapture:health.py:99 Checking config:hr_timestamp ... [32mINFO [0m LogCapture:health.py:99 Checking config:passwordscheme ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:mappingtree ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:search ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... [32mINFO [0m LogCapture:health.py:99 Checking encryption:check_tls_version ... [32mINFO [0m LogCapture:health.py:99 Checking fschecks:file_perms ... [32mINFO [0m LogCapture:health.py:99 Checking refint:attr_indexes ... [32mINFO [0m LogCapture:health.py:99 Checking refint:update_delay ... [32mINFO [0m LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... [32mINFO [0m LogCapture:health.py:99 Checking replication:agmts_status ... [32mINFO [0m LogCapture:health.py:99 Checking replication:conflicts ... [32mINFO [0m LogCapture:health.py:99 Checking dseldif:nsstate ... [32mINFO [0m LogCapture:health.py:99 Checking tls:certificate_expiration ... [32mINFO [0m LogCapture:health.py:99 Checking logs:notes ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:119 2 Issues found! Generating report ... 
[32mINFO [0m LogCapture:health.py:45 [1] DS Lint Error: DSBLE0001 [32mINFO [0m LogCapture:health.py:46 -------------------------------------------------------------------------------- [32mINFO [0m LogCapture:health.py:47 Severity: MEDIUM [32mINFO [0m LogCapture:health.py:49 Check: backends:userroot:mappingtree [32mINFO [0m LogCapture:health.py:50 Affects: [32mINFO [0m LogCapture:health.py:52 -- userroot [32mINFO [0m LogCapture:health.py:53 Details: [32mINFO [0m LogCapture:health.py:54 ----------- [32mINFO [0m LogCapture:health.py:55 This backend may be missing the correct mapping tree references. Mapping Trees allow the directory server to determine which backend an operation is routed to in the abscence of other information. This is extremely important for correct functioning of LDAP ADD for example. A correct Mapping tree for this backend must contain the suffix name, the database name and be a backend type. IE: cn=o3Dexample,cn=mapping tree,cn=config cn: o=example nsslapd-backend: userRoot nsslapd-state: backend objectClass: top objectClass: extensibleObject objectClass: nsMappingTree [32mINFO [0m LogCapture:health.py:56 Resolution: [32mINFO [0m LogCapture:health.py:57 ----------- [32mINFO [0m LogCapture:health.py:58 Either you need to create the mapping tree, or you need to repair the related mapping tree. You will need to do this by hand by editing cn=config, or stopping the instance and editing dse.ldif. 
[32mINFO [0m LogCapture:health.py:45 [2] DS Lint Error: DSBLE0003 [32mINFO [0m LogCapture:health.py:46 -------------------------------------------------------------------------------- [32mINFO [0m LogCapture:health.py:47 Severity: LOW [32mINFO [0m LogCapture:health.py:49 Check: backends:userroot:search [32mINFO [0m LogCapture:health.py:50 Affects: [32mINFO [0m LogCapture:health.py:52 -- dc=example,dc=com [32mINFO [0m LogCapture:health.py:53 Details: [32mINFO [0m LogCapture:health.py:54 ----------- [32mINFO [0m LogCapture:health.py:55 The backend database has not been initialized yet [32mINFO [0m LogCapture:health.py:56 Resolution: [32mINFO [0m LogCapture:health.py:57 ----------- [32mINFO [0m LogCapture:health.py:58 You need to import an LDIF file, or create the suffix entry, in order to initialize the database. [32mINFO [0m LogCapture:health.py:124 ===== End Of Report (2 Issues found) ===== [32mINFO [0m LogCapture:health.py:126 [ { "dsle": "DSBLE0001", "severity": "MEDIUM", "description": "Possibly incorrect mapping tree.", "items": [ "userroot" ], "detail": "This backend may be missing the correct mapping tree references. Mapping Trees allow\nthe directory server to determine which backend an operation is routed to in the\nabscence of other information. This is extremely important for correct functioning\nof LDAP ADD for example.\n\nA correct Mapping tree for this backend must contain the suffix name, the database name\nand be a backend type. IE:\n\ncn=o3Dexample,cn=mapping tree,cn=config\ncn: o=example\nnsslapd-backend: userRoot\nnsslapd-state: backend\nobjectClass: top\nobjectClass: extensibleObject\nobjectClass: nsMappingTree\n\n", "fix": "Either you need to create the mapping tree, or you need to repair the related\nmapping tree. 
You will need to do this by hand by editing cn=config, or stopping\nthe instance and editing dse.ldif.\n", "check": "backends:userroot:mappingtree" }, { "dsle": "DSBLE0003", "severity": "LOW", "description": "Uninitialized backend database.", "items": [ "dc=example,dc=com" ], "detail": "The backend database has not been initialized yet", "fix": "You need to import an LDIF file, or create the suffix entry, in order to initialize the database.", "check": "backends:userroot:search" } ] [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... [32mINFO [0m LogCapture:health.py:99 Checking config:hr_timestamp ... [32mINFO [0m LogCapture:health.py:99 Checking config:passwordscheme ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:mappingtree ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:search ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... [32mINFO [0m LogCapture:health.py:99 Checking encryption:check_tls_version ... [32mINFO [0m LogCapture:health.py:99 Checking fschecks:file_perms ... [32mINFO [0m LogCapture:health.py:99 Checking refint:attr_indexes ... [32mINFO [0m LogCapture:health.py:99 Checking refint:update_delay ... [32mINFO [0m LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... [32mINFO [0m LogCapture:health.py:99 Checking replication:agmts_status ... [32mINFO [0m LogCapture:health.py:99 Checking replication:conflicts ... [32mINFO [0m LogCapture:health.py:99 Checking dseldif:nsstate ... [32mINFO [0m LogCapture:health.py:99 Checking tls:certificate_expiration ... [32mINFO [0m LogCapture:health.py:99 Checking logs:notes ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:111 No issues found. [32mINFO [0m LogCapture:health.py:113 [] | |||
Passed | suites/healthcheck/healthcheck_test.py::test_healthcheck_database_not_initialized | 0.34 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m LogCapture:health.py:94 Beginning lint report, this could take a while ... [32mINFO [0m LogCapture:health.py:99 Checking config:hr_timestamp ... [32mINFO [0m LogCapture:health.py:99 Checking config:passwordscheme ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:mappingtree ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:search ... [32mINFO [0m LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... [32mINFO [0m LogCapture:health.py:99 Checking encryption:check_tls_version ... [32mINFO [0m LogCapture:health.py:99 Checking fschecks:file_perms ... [32mINFO [0m LogCapture:health.py:99 Checking refint:attr_indexes ... [32mINFO [0m LogCapture:health.py:99 Checking refint:update_delay ... [32mINFO [0m LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... [32mINFO [0m LogCapture:health.py:99 Checking replication:agmts_status ... [32mINFO [0m LogCapture:health.py:99 Checking replication:conflicts ... [32mINFO [0m LogCapture:health.py:99 Checking dseldif:nsstate ... [32mINFO [0m LogCapture:health.py:99 Checking tls:certificate_expiration ... [32mINFO [0m LogCapture:health.py:99 Checking logs:notes ... [32mINFO [0m LogCapture:health.py:106 Healthcheck complete. [32mINFO [0m LogCapture:health.py:119 1 Issue found! Generating report ... 
[32mINFO [0m LogCapture:health.py:45 [1] DS Lint Error: DSBLE0003 [32mINFO [0m LogCapture:health.py:46 -------------------------------------------------------------------------------- [32mINFO [0m LogCapture:health.py:47 Severity: LOW [32mINFO [0m LogCapture:health.py:49 Check: backends:userroot:search [32mINFO [0m LogCapture:health.py:50 Affects: [32mINFO [0m LogCapture:health.py:52 -- dc=example,dc=com [32mINFO [0m LogCapture:health.py:53 Details: [32mINFO [0m LogCapture:health.py:54 ----------- [32mINFO [0m LogCapture:health.py:55 The backend database has not been initialized yet [32mINFO [0m LogCapture:health.py:56 Resolution: [32mINFO [0m LogCapture:health.py:57 ----------- [32mINFO [0m LogCapture:health.py:58 You need to import an LDIF file, or create the suffix entry, in order to initialize the database. [32mINFO [0m LogCapture:health.py:124 ===== End Of Report (1 Issue found) ===== [32mINFO [0m LogCapture:health.py:126 [ { "dsle": "DSBLE0003", "severity": "LOW", "description": "Uninitialized backend database.", "items": [ "dc=example,dc=com" ], "detail": "The backend database has not been initialized yet", "fix": "You need to import an LDIF file, or create the suffix entry, in order to initialize the database.", "check": "backends:userroot:search" } ] | |||
Passed | suites/import/import_test.py::test_import_with_index | 9.15 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/import/import_test.py::test_crash_on_ldif2db | 7.65 | |
No log output captured. | |||
Passed | suites/import/import_test.py::test_ldif2db_allows_entries_without_a_parent_to_be_imported | 6.09 | |
-------------------------------Captured log call-------------------------------- [31m[1mERROR [0m lib389:__init__.py:2647 ldif2db: Can't find file: /var/lib/dirsrv/slapd-standalone1/ldif/bogus.ldif | |||
Passed | suites/import/import_test.py::test_issue_a_warning_if_the_cache_size_is_smaller | 8.03 | |
No log output captured. | |||
Passed | suites/import/import_test.py::test_fast_slow_import | 27.19 | |
No log output captured. | |||
Passed | suites/import/import_test.py::test_entry_with_escaped_characters_fails_to_import_and_index | 15.38 | |
------------------------------Captured stderr call------------------------------ [09/Nov/2020:21:01:23.446487585 -0500] - INFO - ldbm_instance_config_cachememsize_set - force a minimal value 512000 [09/Nov/2020:21:01:23.462510151 -0500] - INFO - check_and_set_import_cache - pagesize: 4096, available bytes 7487520768, process usage 22966272 [09/Nov/2020:21:01:23.472824608 -0500] - INFO - check_and_set_import_cache - Import allocates 2924812KB import cache. [09/Nov/2020:21:01:23.477821071 -0500] - INFO - bdb_copy_directory - Backing up file 0 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T21:01:23.261738/userRoot/givenName.db) [09/Nov/2020:21:01:23.483545384 -0500] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/givenName.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T21:01:23.261738/userRoot/givenName.db [09/Nov/2020:21:01:23.487827747 -0500] - INFO - bdb_copy_directory - Backing up file 1 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T21:01:23.261738/userRoot/aci.db) [09/Nov/2020:21:01:23.492477123 -0500] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/aci.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T21:01:23.261738/userRoot/aci.db [09/Nov/2020:21:01:23.498646581 -0500] - INFO - bdb_copy_directory - Backing up file 2 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T21:01:23.261738/userRoot/sn.db) [09/Nov/2020:21:01:23.503506355 -0500] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/sn.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T21:01:23.261738/userRoot/sn.db [09/Nov/2020:21:01:23.510824894 -0500] - INFO - bdb_copy_directory - Backing up file 3 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T21:01:23.261738/userRoot/numsubordinates.db) [09/Nov/2020:21:01:23.514081895 -0500] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/numsubordinates.db 
to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T21:01:23.261738/userRoot/numsubordinates.db [09/Nov/2020:21:01:23.519616171 -0500] - INFO - bdb_copy_directory - Backing up file 4 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T21:01:23.261738/userRoot/nsuniqueid.db) [09/Nov/2020:21:01:23.530479380 -0500] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/nsuniqueid.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T21:01:23.261738/userRoot/nsuniqueid.db [09/Nov/2020:21:01:23.537891315 -0500] - INFO - bdb_copy_directory - Backing up file 5 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T21:01:23.261738/userRoot/ancestorid.db) [09/Nov/2020:21:01:23.542550094 -0500] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/ancestorid.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T21:01:23.261738/userRoot/ancestorid.db [09/Nov/2020:21:01:23.547522649 -0500] - INFO - bdb_copy_directory - Backing up file 6 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T21:01:23.261738/userRoot/parentid.db) [09/Nov/2020:21:01:23.553383526 -0500] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/parentid.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T21:01:23.261738/userRoot/parentid.db [09/Nov/2020:21:01:23.557054255 -0500] - INFO - bdb_copy_directory - Backing up file 7 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T21:01:23.261738/userRoot/mail.db) [09/Nov/2020:21:01:23.562954680 -0500] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/mail.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T21:01:23.261738/userRoot/mail.db [09/Nov/2020:21:01:23.578084255 -0500] - INFO - bdb_copy_directory - Backing up file 8 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T21:01:23.261738/userRoot/telephoneNumber.db) [09/Nov/2020:21:01:23.584457361 -0500] - INFO - 
dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/telephoneNumber.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T21:01:23.261738/userRoot/telephoneNumber.db [09/Nov/2020:21:01:23.598340880 -0500] - INFO - bdb_copy_directory - Backing up file 9 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T21:01:23.261738/userRoot/cn.db) [09/Nov/2020:21:01:23.602272622 -0500] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/cn.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T21:01:23.261738/userRoot/cn.db [09/Nov/2020:21:01:23.606209737 -0500] - INFO - bdb_copy_directory - Backing up file 10 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T21:01:23.261738/userRoot/roomNumber.db) [09/Nov/2020:21:01:23.611490662 -0500] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/roomNumber.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T21:01:23.261738/userRoot/roomNumber.db [09/Nov/2020:21:01:23.615565938 -0500] - INFO - bdb_copy_directory - Backing up file 11 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T21:01:23.261738/userRoot/DBVERSION) [09/Nov/2020:21:01:23.619979972 -0500] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/DBVERSION to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T21:01:23.261738/userRoot/DBVERSION [09/Nov/2020:21:01:23.625342748 -0500] - INFO - bdb_copy_directory - Backing up file 12 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T21:01:23.261738/userRoot/uid.db) [09/Nov/2020:21:01:23.636567586 -0500] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/uid.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T21:01:23.261738/userRoot/uid.db [09/Nov/2020:21:01:23.642572170 -0500] - INFO - bdb_copy_directory - Backing up file 13 
(/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T21:01:23.261738/userRoot/entryrdn.db) [09/Nov/2020:21:01:23.670903588 -0500] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/entryrdn.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T21:01:23.261738/userRoot/entryrdn.db [09/Nov/2020:21:01:23.675029060 -0500] - INFO - bdb_copy_directory - Backing up file 14 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T21:01:23.261738/userRoot/id2entry.db) [09/Nov/2020:21:01:23.681542826 -0500] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/id2entry.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T21:01:23.261738/userRoot/id2entry.db [09/Nov/2020:21:01:23.687731678 -0500] - INFO - bdb_copy_directory - Backing up file 15 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T21:01:23.261738/userRoot/objectclass.db) [09/Nov/2020:21:01:23.691237225 -0500] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/objectclass.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T21:01:23.261738/userRoot/objectclass.db [09/Nov/2020:21:01:23.697180140 -0500] - INFO - upgradedb_core - userRoot: Start upgradedb. [09/Nov/2020:21:01:23.701456667 -0500] - INFO - check_and_set_import_cache - pagesize: 4096, available bytes 7487152128, process usage 23887872 [09/Nov/2020:21:01:23.704686290 -0500] - INFO - check_and_set_import_cache - Import allocates 2924668KB import cache. [09/Nov/2020:21:01:24.355186444 -0500] - INFO - bdb_import_main - reindex userRoot: Index buffering enabled with bucket size 100 [09/Nov/2020:21:01:25.062983647 -0500] - INFO - import_monitor_threads - reindex userRoot: Workers finished; cleaning up... [09/Nov/2020:21:01:25.270194735 -0500] - INFO - import_monitor_threads - reindex userRoot: Workers cleaned up. [09/Nov/2020:21:01:25.278721594 -0500] - INFO - bdb_import_main - reindex userRoot: Cleaning up producer thread... 
[09/Nov/2020:21:01:25.291902882 -0500] - INFO - bdb_import_main - reindex userRoot: Indexing complete. Post-processing... [09/Nov/2020:21:01:25.297019385 -0500] - INFO - bdb_import_main - reindex userRoot: Generating numsubordinates (this may take several minutes to complete)... [09/Nov/2020:21:01:25.303566138 -0500] - INFO - bdb_import_main - reindex userRoot: Generating numSubordinates complete. [09/Nov/2020:21:01:25.308301852 -0500] - INFO - bdb_get_nonleaf_ids - reindex userRoot: Gathering ancestorid non-leaf IDs... [09/Nov/2020:21:01:25.312889045 -0500] - INFO - bdb_get_nonleaf_ids - reindex userRoot: Finished gathering ancestorid non-leaf IDs. [09/Nov/2020:21:01:25.317176761 -0500] - INFO - ldbm_get_nonleaf_ids - reindex userRoot: Starting sort of ancestorid non-leaf IDs... [09/Nov/2020:21:01:25.329546669 -0500] - INFO - ldbm_get_nonleaf_ids - reindex userRoot: Finished sort of ancestorid non-leaf IDs. [09/Nov/2020:21:01:25.340211759 -0500] - INFO - bdb_ancestorid_new_idl_create_index - reindex userRoot: Creating ancestorid index (new idl)... [09/Nov/2020:21:01:25.345609746 -0500] - INFO - bdb_ancestorid_new_idl_create_index - reindex userRoot: Created ancestorid index (new idl). [09/Nov/2020:21:01:25.349994252 -0500] - INFO - bdb_import_main - reindex userRoot: Flushing caches... [09/Nov/2020:21:01:25.358779458 -0500] - INFO - bdb_import_main - reindex userRoot: Closing files... [09/Nov/2020:21:01:25.515450760 -0500] - INFO - bdb_import_main - reindex userRoot: Reindexing complete. Processed 15 entries in 1 seconds. 
(15.00 entries/sec) [09/Nov/2020:21:01:25.520283266 -0500] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/log.0000000001 to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T21:01:23.261738/log.0000000001 [09/Nov/2020:21:01:25.548147496 -0500] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/DBVERSION to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-11-09T21:01:23.261738/DBVERSION [09/Nov/2020:21:01:25.552031536 -0500] - INFO - bdb_pre_close - All database threads now stopped | |||
Passed | suites/import/regression_test.py::test_replay_import_operation | 32.49 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.import.regression_test:regression_test.py:94 Exporting LDIF online... [32mINFO [0m tests.suites.import.regression_test:regression_test.py:53 Run. [32mINFO [0m tests.suites.import.regression_test:regression_test.py:104 Importing LDIF online, should raise operation error. [32mINFO [0m tests.suites.import.regression_test:regression_test.py:115 Looping. Tried 1 times so far. [32mINFO [0m tests.suites.import.regression_test:regression_test.py:57 Adding users. [32mINFO [0m tests.suites.import.regression_test:regression_test.py:119 Importing LDIF online | |||
Passed | suites/import/regression_test.py::test_import_be_default | 6.95 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.import.regression_test:regression_test.py:143 Adding suffix:dc=default,dc=com and backend: default... [32mINFO [0m tests.suites.import.regression_test:regression_test.py:149 Create LDIF file and import it... [32mINFO [0m tests.suites.import.regression_test:regression_test.py:154 Stopping the server and running offline import... [32mINFO [0m tests.suites.import.regression_test:regression_test.py:160 Verifying entry count after import... [32mINFO [0m tests.suites.import.regression_test:regression_test.py:166 Test PASSED | |||
Passed | suites/import/regression_test.py::test_del_suffix_import | 6.93 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.import.regression_test:regression_test.py:183 Adding suffix:dc=importest1,dc=com and backend: importest1 [32mINFO [0m tests.suites.import.regression_test:regression_test.py:188 Create LDIF file and import it [32mINFO [0m tests.suites.import.regression_test:regression_test.py:194 Stopping the server and running offline import [32mINFO [0m tests.suites.import.regression_test:regression_test.py:199 Deleting suffix-dc=importest2,dc=com [32mINFO [0m tests.suites.import.regression_test:regression_test.py:202 Adding the same database-importest1 after deleting it | |||
Passed | suites/import/regression_test.py::test_del_suffix_backend | 8.26 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.import.regression_test:regression_test.py:221 Adding suffix:dc=importest2,dc=com and backend: importest2 [32mINFO [0m tests.suites.import.regression_test:regression_test.py:226 Create LDIF file and import it [32mINFO [0m lib389:tasks.py:498 Import task import_11092020_210231 for file /var/lib/dirsrv/slapd-standalone1/ldif/suffix_del2.ldif completed successfully [32mINFO [0m tests.suites.import.regression_test:regression_test.py:234 Deleting suffix-dc=importest2,dc=com [32mINFO [0m tests.suites.import.regression_test:regression_test.py:237 Adding the same database-importest2 after deleting it [32mINFO [0m tests.suites.import.regression_test:regression_test.py:240 Checking if server can be restarted after re-adding the same database | |||
Passed | suites/import/regression_test.py::test_import_duplicate_dn | 16.28 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.import.regression_test:regression_test.py:266 Delete the previous error logs [32mINFO [0m tests.suites.import.regression_test:regression_test.py:269 Create import file [32mINFO [0m tests.suites.import.regression_test:regression_test.py:293 Import ldif with duplicate entry [31m[1mERROR [0m lib389:tasks.py:495 Error: import task import_11092020_210242 for file /var/lib/dirsrv/slapd-standalone1/ldif/data.ldif exited with -23 [32mINFO [0m tests.suites.import.regression_test:regression_test.py:296 Restart the server to flush the logs [32mINFO [0m tests.suites.import.regression_test:regression_test.py:299 Error log should not have "unable to flush" message [32mINFO [0m tests.suites.import.regression_test:regression_test.py:302 Error log should have "Duplicated DN detected" message | |||
Passed | suites/import/regression_test.py::test_large_ldif2db_ancestorid_index_creation | 648.88 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.import.regression_test:regression_test.py:355 Delete the previous errors logs [32mINFO [0m tests.suites.import.regression_test:regression_test.py:358 Add suffix:o=test and backend: test... [32mINFO [0m tests.suites.import.regression_test:regression_test.py:371 Create a large nested ldif file using dbgen : /var/lib/dirsrv/slapd-standalone1/ldif/large_nested.ldif [32mINFO [0m tests.suites.import.regression_test:regression_test.py:374 Stop the server and run offline import... [32mINFO [0m tests.suites.import.regression_test:regression_test.py:379 Starting the server [32mINFO [0m tests.suites.import.regression_test:regression_test.py:382 parse the errors logs to check lines with "Starting sort of ancestorid" are present [32mINFO [0m tests.suites.import.regression_test:regression_test.py:386 parse the errors logs to check lines with "Finished sort of ancestorid" are present [32mINFO [0m tests.suites.import.regression_test:regression_test.py:390 parse the error logs for the line with "Gathering ancestorid non-leaf IDs" [32mINFO [0m tests.suites.import.regression_test:regression_test.py:394 parse the error logs for the line with "Created ancestorid index" [32mINFO [0m tests.suites.import.regression_test:regression_test.py:398 get the ancestorid non-leaf IDs indexing start and end time from the collected strings [32mINFO [0m tests.suites.import.regression_test:regression_test.py:404 Calculate the elapsed time for the ancestorid non-leaf IDs index creation | |||
Passed | suites/indexes/regression_test.py::test_reindex_task_creates_abandoned_index_file | 13.58 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:tasks.py:798 Index task index_all_11092020_211359 completed successfully [32mINFO [0m lib389:tasks.py:798 Index task index_all_11092020_211402 completed successfully [32mINFO [0m lib389:tasks.py:798 Index task index_all_11092020_211409 completed successfully | |||
Passed | suites/lib389/config_compare_test.py::test_config_compare | 0.02 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38902, 'ldap-secureport': 63602, 'server-id': 'standalone2', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/lib389/dsldapobject/dn_construct_test.py::test_mul_explicit_rdn | 0.02 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/lib389/dsldapobject/dn_construct_test.py::test_mul_derive_single_dn | 0.13 | |
No log output captured. | |||
Passed | suites/lib389/dsldapobject/dn_construct_test.py::test_mul_derive_mult_dn | 0.02 | |
No log output captured. | |||
Passed | suites/lib389/dsldapobject/dn_construct_test.py::test_sin_explicit_dn | 0.02 | |
No log output captured. | |||
Passed | suites/lib389/dsldapobject/dn_construct_test.py::test_sin_explicit_rdn | 0.11 | |
No log output captured. | |||
Passed | suites/lib389/dsldapobject/dn_construct_test.py::test_sin_derive_single_dn | 0.27 | |
No log output captured. | |||
Passed | suites/lib389/dsldapobject/dn_construct_test.py::test_sin_derive_mult_dn | 0.03 | |
No log output captured. | |||
Passed | suites/lib389/dsldapobject/dn_construct_test.py::test_sin_invalid_no_basedn | 0.01 | |
No log output captured. | |||
Passed | suites/lib389/dsldapobject/dn_construct_test.py::test_sin_invalid_no_rdn | 0.01 | |
No log output captured. | |||
Passed | suites/lib389/dsldapobject/dn_construct_test.py::test_sin_non_present_rdn | 0.03 | |
No log output captured. | |||
Passed | suites/lib389/idm/user_compare_i2_test.py::test_user_compare_i2 | 0.09 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38902, 'ldap-secureport': 63602, 'server-id': 'standalone2', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/lib389/idm/user_compare_m2Repl_test.py::test_user_compare_m2Repl | 1.09 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect fc7996d8-4b67-4cb8-bc30-ce3a61e72f3a / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 
Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 0473e287-e844-4cfe-a6cd-39eecfe2603c / got description=fc7996d8-4b67-4cb8-bc30-ce3a61e72f3a) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect a6abb3cd-5ec0-410a-ae9c-d2e354ee8af8 / got description=0473e287-e844-4cfe-a6cd-39eecfe2603c) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working | |||
Passed | suites/lib389/idm/user_compare_st_test.py::test_user_compare | 0.33 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/logging/logging_config_test.py::test_logging_digit_config[logexpirationtime-invalid_vals0-valid_vals0] | 0.24 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/logging/logging_config_test.py::test_logging_digit_config[maxlogsize-invalid_vals1-valid_vals1] | 0.12 | |
No log output captured. | |||
Passed | suites/logging/logging_config_test.py::test_logging_digit_config[logmaxdiskspace-invalid_vals2-valid_vals2] | 0.15 | |
No log output captured. | |||
Passed | suites/logging/logging_config_test.py::test_logging_digit_config[logminfreediskspace-invalid_vals3-valid_vals3] | 0.13 | |
No log output captured. | |||
Passed | suites/logging/logging_config_test.py::test_logging_digit_config[mode-invalid_vals4-valid_vals4] | 0.19 | |
No log output captured. | |||
Passed | suites/logging/logging_config_test.py::test_logging_digit_config[maxlogsperdir-invalid_vals5-valid_vals5] | 0.18 | |
No log output captured. | |||
Passed | suites/logging/logging_config_test.py::test_logging_digit_config[logrotationsynchour-invalid_vals6-valid_vals6] | 0.23 | |
No log output captured. | |||
Passed | suites/logging/logging_config_test.py::test_logging_digit_config[logrotationsyncmin-invalid_vals7-valid_vals7] | 0.19 | |
No log output captured. | |||
Passed | suites/logging/logging_config_test.py::test_logging_digit_config[logrotationtime-invalid_vals8-valid_vals8] | 0.15 | |
No log output captured. | |||
Passed | suites/mapping_tree/acceptance_test.py::test_invalid_mt | 0.02 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/mapping_tree/be_del_and_default_naming_attr_test.py::test_be_delete | 1.16 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. | |||
Passed | suites/mapping_tree/referral_during_tot_init_test.py::test_referral_during_tot | 10.09 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 888b194d-a990-4b16-b9b5-455ad96dcf5e / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 
Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect cdf98381-8cda-4534-b9bd-a15b1ca1d2f8 / got description=888b194d-a990-4b16-b9b5-455ad96dcf5e) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists ------------------------------Captured stderr call------------------------------ [09/Nov/2020:21:17:29.273614540 -0500] - INFO - slapd_exemode_ldif2db - Backend Instance: userRoot | |||
Passed | suites/memberof_plugin/regression_test.py::test_memberof_with_repl | 78.53 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for hub1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39101, 'ldap-secureport': 63801, 'server-id': 'hub1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for consumer1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39201, 'ldap-secureport': 63901, 'server-id': 'consumer1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:524 Creating replication topology. 
[32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39101 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39101 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39101 is NOT working (expect f0735608-7c64-4023-82bc-dd9b8872849e / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39101 is working [32mINFO [0m lib389.replica:replica.py:2211 SUCCESS: joined consumer from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39101 [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39101 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is was created [32mINFO [0m lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39101 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 25b6cb93-5e01-4f7c-9c33-8799a2b36677 / got description=f0735608-7c64-4023-82bc-dd9b8872849e) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from 
ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is working -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.memberof_plugin.regression_test:regression_test.py:77 update cn=101,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config to add nsDS5ReplicatedAttributeListTotal [32mINFO [0m tests.suites.memberof_plugin.regression_test:regression_test.py:77 update cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config to add nsDS5ReplicatedAttributeListTotal | |||
Passed | suites/memberof_plugin/regression_test.py::test_scheme_violation_errors_logged | 4.95 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect cbb6d384-caca-402d-bbf7-42d1baac900e / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 
Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 622b9ef0-923e-49c8-8b98-ebd2668cdc5f / got description=cbb6d384-caca-402d-bbf7-42d1baac900e) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.memberof_plugin.regression_test:regression_test.py:319 memberOf attr value - cn=group1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.memberof_plugin.regression_test:regression_test.py:322 pattern = .*oc_check_allowed_sv.*uid=user_,ou=people,dc=example,dc=com.*memberOf.*not allowed.* | |||
Passed | suites/memberof_plugin/regression_test.py::test_memberof_with_changelog_reset | 72.66 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.memberof_plugin.regression_test:regression_test.py:354 Configure memberof on M1 and M2 [32mINFO [0m tests.suites.memberof_plugin.regression_test:regression_test.py:365 On M1, add 999 test entries allowing memberof [32mINFO [0m tests.suites.memberof_plugin.regression_test:regression_test.py:51 Adding 999 users [32mINFO [0m tests.suites.memberof_plugin.regression_test:regression_test.py:368 On M1, add a group with these 999 entries as members [32mINFO [0m tests.suites.memberof_plugin.regression_test:regression_test.py:376 Adding the test group using async function [32mINFO [0m tests.suites.memberof_plugin.regression_test:regression_test.py:386 Check the log messages for error [32mINFO [0m tests.suites.memberof_plugin.regression_test:regression_test.py:390 Check that the replication is working fine both ways, M1 <-> M2 [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect bef72e44-5d0f-4733-9d1e-976c7a597090 / got description=622b9ef0-923e-49c8-8b98-ebd2668cdc5f) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect b76d2f2a-856c-448a-a417-a1cd68d0df32 / got description=bef72e44-5d0f-4733-9d1e-976c7a597090) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 
b76d2f2a-856c-448a-a417-a1cd68d0df32 / got description=bef72e44-5d0f-4733-9d1e-976c7a597090) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working | |||
Passed | suites/memberof_plugin/regression_test.py::test_memberof_group | 5.23 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.memberof_plugin.regression_test:regression_test.py:481 Enable memberof plugin and set the scope as cn=sub1,dc=example,dc=com [32mINFO [0m lib389:regression_test.py:440 !!!!!!! uid=test_m1,cn=sub1,dc=example,dc=com: memberof->b'cn=g1,cn=sub1,dc=example,dc=com' [32mINFO [0m lib389:regression_test.py:440 !!!!!!! uid=test_m2,cn=sub1,dc=example,dc=com: memberof->b'cn=g1,cn=sub1,dc=example,dc=com' [32mINFO [0m lib389:regression_test.py:440 !!!!!!! uid=test_m1,cn=sub1,dc=example,dc=com: memberof->b'cn=g1,cn=sub1,dc=example,dc=com' [32mINFO [0m lib389:regression_test.py:440 !!!!!!! uid=test_m2,cn=sub1,dc=example,dc=com: memberof->b'cn=g1,cn=sub1,dc=example,dc=com' [31mCRITICAL[0m tests.suites.memberof_plugin.regression_test:regression_test.py:427 Renaming user (cn=g2,cn=sub2,dc=example,dc=com): new cn=g2-new [32mINFO [0m lib389:regression_test.py:440 !!!!!!! uid=test_m1,cn=sub1,dc=example,dc=com: memberof->b'cn=g1,cn=sub1,dc=example,dc=com' [32mINFO [0m lib389:regression_test.py:440 !!!!!!! uid=test_m2,cn=sub1,dc=example,dc=com: memberof->b'cn=g1,cn=sub1,dc=example,dc=com' [32mINFO [0m lib389:regression_test.py:440 !!!!!!! uid=test_m1,cn=sub1,dc=example,dc=com: memberof->b'cn=g1,cn=sub1,dc=example,dc=com' [32mINFO [0m lib389:regression_test.py:440 !!!!!!! uid=test_m1,cn=sub1,dc=example,dc=com: memberof->b'cn=g2-new,cn=sub1,dc=example,dc=com' [32mINFO [0m lib389:regression_test.py:440 !!!!!!! 
uid=test_m2,cn=sub1,dc=example,dc=com: memberof->b'cn=g1,cn=sub1,dc=example,dc=com' [32mINFO [0m lib389:regression_test.py:440 !!!!!!! uid=test_m2,cn=sub1,dc=example,dc=com: memberof->b'cn=g2-new,cn=sub1,dc=example,dc=com' | |||
Passed | suites/memberof_plugin/regression_test.py::test_entrycache_on_modrdn_failure | 10.39 | |
-------------------------------Captured log call-------------------------------- [31mCRITICAL[0m tests.suites.memberof_plugin.regression_test:regression_test.py:575 Adding user (cn=user0,ou=people,dc=example,dc=com): [31mCRITICAL[0m tests.suites.memberof_plugin.regression_test:regression_test.py:575 Adding user (cn=user1,ou=people,dc=example,dc=com): [31mCRITICAL[0m tests.suites.memberof_plugin.regression_test:regression_test.py:575 Adding user (cn=user2,ou=people,dc=example,dc=com): [31mCRITICAL[0m tests.suites.memberof_plugin.regression_test:regression_test.py:575 Adding user (cn=user3,ou=people,dc=example,dc=com): [31mCRITICAL[0m tests.suites.memberof_plugin.regression_test:regression_test.py:575 Adding user (cn=user4,ou=people,dc=example,dc=com): [31mCRITICAL[0m tests.suites.memberof_plugin.regression_test:regression_test.py:575 Adding user (cn=user5,ou=people,dc=example,dc=com): [31mCRITICAL[0m tests.suites.memberof_plugin.regression_test:regression_test.py:575 Adding user (cn=user6,ou=people,dc=example,dc=com): [31mCRITICAL[0m tests.suites.memberof_plugin.regression_test:regression_test.py:575 Adding user (cn=user7,ou=people,dc=example,dc=com): [31mCRITICAL[0m tests.suites.memberof_plugin.regression_test:regression_test.py:575 Adding user (cn=user8,ou=people,dc=example,dc=com): [31mCRITICAL[0m tests.suites.memberof_plugin.regression_test:regression_test.py:575 Adding user (cn=user9,ou=people,dc=example,dc=com): [32mINFO [0m lib389:regression_test.py:596 !!!!!!! cn=user0,ou=people,dc=example,dc=com: memberof->b'cn=group_in0,ou=people,dc=example,dc=com' (vs b'cn=group_in0,ou=people,dc=example,dc=com') [32mINFO [0m lib389:regression_test.py:596 !!!!!!! cn=user1,ou=people,dc=example,dc=com: memberof->b'cn=group_in0,ou=people,dc=example,dc=com' (vs b'cn=group_in0,ou=people,dc=example,dc=com') [32mINFO [0m lib389:regression_test.py:618 !!!!!!! 
cn=user0,ou=people,dc=example,dc=com: memberof->b'cn=group_in0,ou=people,dc=example,dc=com' (vs b'cn=group_out1,dc=example,dc=com') [32mINFO [0m lib389:regression_test.py:618 !!!!!!! cn=user1,ou=people,dc=example,dc=com: memberof->b'cn=group_in0,ou=people,dc=example,dc=com' (vs b'cn=group_out1,dc=example,dc=com') [32mINFO [0m lib389:regression_test.py:633 !!!!!!! cn=user0,ou=people,dc=example,dc=com: memberof->b'cn=group_in0,ou=people,dc=example,dc=com' (vs b'cn=group_in1,ou=people,dc=example,dc=com') [32mINFO [0m lib389:regression_test.py:633 !!!!!!! cn=user0,ou=people,dc=example,dc=com: memberof->b'cn=group_in1,ou=people,dc=example,dc=com' (vs b'cn=group_in1,ou=people,dc=example,dc=com') [32mINFO [0m lib389:regression_test.py:633 !!!!!!! cn=user1,ou=people,dc=example,dc=com: memberof->b'cn=group_in0,ou=people,dc=example,dc=com' (vs b'cn=group_in1,ou=people,dc=example,dc=com') [32mINFO [0m lib389:regression_test.py:633 !!!!!!! cn=user1,ou=people,dc=example,dc=com: memberof->b'cn=group_in1,ou=people,dc=example,dc=com' (vs b'cn=group_in1,ou=people,dc=example,dc=com') [32mINFO [0m lib389:regression_test.py:672 retrieve: cn=group_admin,ou=permissions,dc=example,dc=com with desc=None [32mINFO [0m lib389:regression_test.py:672 retrieve: cn=group_modify,ou=permissions,dc=example,dc=com with desc=None [32mINFO [0m lib389:regression_test.py:672 retrieve: cn=group_in0,ou=people,dc=example,dc=com with desc=b'mygroup' [32mINFO [0m lib389:regression_test.py:672 retrieve: cn=group_in1,ou=people,dc=example,dc=com with desc=b'mygroup' [32mINFO [0m lib389:regression_test.py:672 retrieve: cn=group_out2,dc=example,dc=com with desc=b'this is to check that the entry having this description has the appropriate DN' | |||
Passed | suites/memberof_plugin/regression_test.py::test_silent_memberof_failure | 10.56 | |
-------------------------------Captured log call-------------------------------- [31mCRITICAL[0m tests.suites.memberof_plugin.regression_test:regression_test.py:738 Adding user (cn=user0,ou=people,dc=example,dc=com): [31mCRITICAL[0m tests.suites.memberof_plugin.regression_test:regression_test.py:738 Adding user (cn=user1,ou=people,dc=example,dc=com): [31mCRITICAL[0m tests.suites.memberof_plugin.regression_test:regression_test.py:738 Adding user (cn=user2,ou=people,dc=example,dc=com): [31mCRITICAL[0m tests.suites.memberof_plugin.regression_test:regression_test.py:738 Adding user (cn=user3,ou=people,dc=example,dc=com): [31mCRITICAL[0m tests.suites.memberof_plugin.regression_test:regression_test.py:738 Adding user (cn=user4,ou=people,dc=example,dc=com): [31mCRITICAL[0m tests.suites.memberof_plugin.regression_test:regression_test.py:738 Adding user (cn=user5,ou=people,dc=example,dc=com): [31mCRITICAL[0m tests.suites.memberof_plugin.regression_test:regression_test.py:738 Adding user (cn=user6,ou=people,dc=example,dc=com): [31mCRITICAL[0m tests.suites.memberof_plugin.regression_test:regression_test.py:738 Adding user (cn=user7,ou=people,dc=example,dc=com): [31mCRITICAL[0m tests.suites.memberof_plugin.regression_test:regression_test.py:738 Adding user (cn=user8,ou=people,dc=example,dc=com): [31mCRITICAL[0m tests.suites.memberof_plugin.regression_test:regression_test.py:738 Adding user (cn=user9,ou=people,dc=example,dc=com): [32mINFO [0m lib389:regression_test.py:759 !!!!!!! cn=user0,ou=people,dc=example,dc=com: memberof->b'cn=group_in0,ou=people,dc=example,dc=com' (vs b'cn=group_in0,ou=people,dc=example,dc=com') [32mINFO [0m lib389:regression_test.py:759 !!!!!!! cn=user1,ou=people,dc=example,dc=com: memberof->b'cn=group_in0,ou=people,dc=example,dc=com' (vs b'cn=group_in0,ou=people,dc=example,dc=com') [32mINFO [0m lib389:regression_test.py:781 !!!!!!! 
cn=user0,ou=people,dc=example,dc=com: memberof->b'cn=group_in0,ou=people,dc=example,dc=com' (vs b'cn=group_out1,dc=example,dc=com') [32mINFO [0m lib389:regression_test.py:781 !!!!!!! cn=user1,ou=people,dc=example,dc=com: memberof->b'cn=group_in0,ou=people,dc=example,dc=com' (vs b'cn=group_out1,dc=example,dc=com') [32mINFO [0m lib389:regression_test.py:796 !!!!!!! cn=user0,ou=people,dc=example,dc=com: memberof->b'cn=group_in0,ou=people,dc=example,dc=com' (vs b'cn=group_in1,ou=people,dc=example,dc=com') [32mINFO [0m lib389:regression_test.py:796 !!!!!!! cn=user0,ou=people,dc=example,dc=com: memberof->b'cn=group_in1,ou=people,dc=example,dc=com' (vs b'cn=group_in1,ou=people,dc=example,dc=com') [32mINFO [0m lib389:regression_test.py:796 !!!!!!! cn=user1,ou=people,dc=example,dc=com: memberof->b'cn=group_in0,ou=people,dc=example,dc=com' (vs b'cn=group_in1,ou=people,dc=example,dc=com') [32mINFO [0m lib389:regression_test.py:796 !!!!!!! cn=user1,ou=people,dc=example,dc=com: memberof->b'cn=group_in1,ou=people,dc=example,dc=com' (vs b'cn=group_in1,ou=people,dc=example,dc=com') [32mINFO [0m lib389:regression_test.py:833 Should assert cn=user2,ou=people,dc=example,dc=com has memberof is False [32mINFO [0m lib389:regression_test.py:833 Should assert cn=user3,ou=people,dc=example,dc=com has memberof is False [32mINFO [0m lib389:regression_test.py:856 Should assert cn=user4,ou=people,dc=example,dc=com has memberof is False [32mINFO [0m lib389:regression_test.py:856 Should assert cn=user5,ou=people,dc=example,dc=com has memberof is False | |||
Passed | suites/monitor/monitor_test.py::test_monitor | 0.27 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.monitor.monitor_test:monitor_test.py:44 connection: ['1:20201110022254Z:3:2:-:cn=directory manager:0:0:0:1:ip=127.0.0.1'], currentconnections: ['1'], totalconnections: ['1'] [32mINFO [0m tests.suites.monitor.monitor_test:monitor_test.py:48 version :: ['1:20201110022254Z:6:5:-:cn=directory manager:0:0:0:1:ip=127.0.0.1'] [32mINFO [0m tests.suites.monitor.monitor_test:monitor_test.py:52 threads: ['16'],currentconnectionsatmaxthreads: ['0'],maxthreadsperconnhits: ['0'] [32mINFO [0m tests.suites.monitor.monitor_test:monitor_test.py:56 nbackends: ['1'], backendmonitordn: ['cn=monitor,cn=userRoot,cn=ldbm database,cn=plugins,cn=config'] [32mINFO [0m tests.suites.monitor.monitor_test:monitor_test.py:60 opsinitiated: ['12'], opscompleted: ['13'] [32mINFO [0m tests.suites.monitor.monitor_test:monitor_test.py:64 dtablesize: ['1024'],readwaiters: ['0'],entriessent: ['14'],bytessent: ['1097'],currenttime: ['20201110022254Z'],starttime: ['20201110022254Z'] | |||
Passed | suites/monitor/monitor_test.py::test_monitor_ldbm | 0.02 | |
No log output captured. | |||
Passed | suites/monitor/monitor_test.py::test_monitor_backend | 0.01 | |
No log output captured. | |||
Passed | suites/openldap_2_389/migrate_test.py::test_parse_openldap_slapdd | 0.02 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.migrate.openldap.config:config.py:264 Examining OpenLDAP Configuration ... [32mINFO [0m lib389.migrate.openldap.config:config.py:285 Completed OpenLDAP Configuration Parsing. | |||
Passed | suites/openldap_2_389/migrate_test.py::test_migrate_openldap_slapdd | 24.59 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. ------------------------------Captured stdout call------------------------------ ==== migration plan ==== SchemaAttributeCreate -> ('pseudonym',) SchemaAttributeCreate -> ('email', 'emailAddress', 'pkcs9email') SchemaAttributeCreate -> ('textEncodedORAddress',) SchemaAttributeUnsupported -> ('otherMailbox',) SchemaAttributeCreate -> ('aRecord',) SchemaAttributeCreate -> ('mDRecord',) SchemaAttributeCreate -> ('mXRecord',) SchemaAttributeCreate -> ('nSRecord',) SchemaAttributeCreate -> ('sOARecord',) SchemaAttributeCreate -> ('cNAMERecord',) SchemaAttributeCreate -> ('janetMailbox',) SchemaAttributeCreate -> ('mailPreferenceOption',) SchemaAttributeUnsupported -> ('dSAQuality',) SchemaAttributeUnsupported -> ('singleLevelQuality',) SchemaAttributeUnsupported -> ('subtreeMinimumQuality',) SchemaAttributeUnsupported -> ('subtreeMaximumQuality',) SchemaAttributeCreate -> ('personalSignature',) SchemaAttributeCreate -> ('suseDefaultBase',) SchemaAttributeCreate -> ('suseNextUniqueId',) SchemaAttributeCreate -> ('suseMinUniqueId',) SchemaAttributeCreate -> ('suseMaxUniqueId',) SchemaAttributeCreate -> ('suseDefaultTemplate',) SchemaAttributeCreate -> ('suseSearchFilter',) SchemaAttributeCreate -> ('suseDefaultValue',) SchemaAttributeCreate -> ('suseNamingAttribute',) SchemaAttributeCreate -> ('suseSecondaryGroup',) SchemaAttributeCreate -> ('suseMinPasswordLength',) SchemaAttributeCreate -> ('suseMaxPasswordLength',) SchemaAttributeCreate -> ('susePasswordHash',) SchemaAttributeCreate -> ('suseSkelDir',) SchemaAttributeCreate -> ('susePlugin',) 
SchemaAttributeCreate -> ('suseMapAttribute',) SchemaAttributeCreate -> ('suseImapServer',) SchemaAttributeCreate -> ('suseImapAdmin',) SchemaAttributeCreate -> ('suseImapDefaultQuota',) SchemaAttributeCreate -> ('suseImapUseSsl',) SchemaClassUnsupported -> 0.9.2342.19200300.100.4.4 ('pilotPerson', 'newPilotPerson') may -> ('userid', 'textEncodedORAddress', 'rfc822Mailbox', 'favouriteDrink', 'roomNumber', 'userClass', 'homeTelephoneNumber', 'homePostalAddress', 'secretary', 'personalTitle', 'preferredDeliveryMethod', 'businessCategory', 'janetMailbox', 'otherMailbox', 'mobileTelephoneNumber', 'pagerTelephoneNumber', 'organizationalStatus', 'mailPreferenceOption', 'personalSignature') must -> () sup -> ('person',) SchemaClassCreate -> 0.9.2342.19200300.100.4.15 ('dNSDomain',) may -> ('ARecord', 'MDRecord', 'MXRecord', 'NSRecord', 'SOARecord', 'CNAMERecord') must -> () sup -> ('domain',) SchemaClassCreate -> 0.9.2342.19200300.100.4.20 ('pilotOrganization',) may -> ('buildingName',) must -> () sup -> ('organization', 'organizationalUnit') SchemaClassUnsupported -> 0.9.2342.19200300.100.4.21 ('pilotDSA',) may -> ('dSAQuality',) must -> () sup -> ('dsa',) SchemaClassUnsupported -> 0.9.2342.19200300.100.4.22 ('qualityLabelledData',) may -> ('subtreeMinimumQuality', 'subtreeMaximumQuality') must -> ('dsaQuality',) sup -> ('top',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:2 ('suseModuleConfiguration',) may -> ('suseDefaultBase',) must -> ('cn',) sup -> ('top',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:3 ('suseUserConfiguration',) may -> ('suseMinPasswordLength', 'suseMaxPasswordLength', 'susePasswordHash', 'suseSkelDir', 'suseNextUniqueId', 'suseMinUniqueId', 'suseMaxUniqueId', 'suseDefaultTemplate', 'suseSearchFilter', 'suseMapAttribute') must -> () sup -> ('suseModuleConfiguration',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:4 ('suseObjectTemplate',) may -> ('susePlugin', 'suseDefaultValue', 'suseNamingAttribute') must -> ('cn',) sup -> ('top',) 
SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:5 ('suseUserTemplate',) may -> ('suseSecondaryGroup',) must -> ('cn',) sup -> ('suseObjectTemplate',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:6 ('suseGroupTemplate',) may -> () must -> ('cn',) sup -> ('suseObjectTemplate',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:7 ('suseGroupConfiguration',) may -> ('suseNextUniqueId', 'suseMinUniqueId', 'suseMaxUniqueId', 'suseDefaultTemplate', 'suseSearchFilter', 'suseMapAttribute') must -> () sup -> ('suseModuleConfiguration',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:8 ('suseCaConfiguration',) may -> () must -> () sup -> ('suseModuleConfiguration',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:9 ('suseDnsConfiguration',) may -> () must -> () sup -> ('suseModuleConfiguration',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:10 ('suseDhcpConfiguration',) may -> () must -> () sup -> ('suseModuleConfiguration',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:11 ('suseMailConfiguration',) may -> () must -> ('suseImapServer', 'suseImapAdmin', 'suseImapDefaultQuota', 'suseImapUseSsl') sup -> ('suseModuleConfiguration',) DatabaseReindex -> dc=example,dc=com PluginMemberOfEnable PluginMemberOfScope -> dc=example,dc=com PluginMemberOfFixup -> dc=example,dc=com PluginRefintEnable PluginRefintAttributes -> member PluginRefintAttributes -> memberOf PluginRefintScope -> dc=example,dc=com PluginUniqueConfigure -> dc=example,dc=com, mail 401a528e-eaf5-1039-8667-dbfbf2f5e6dd PluginUniqueConfigure -> dc=example,dc=com, uid 401a528e-eaf5-1039-8667-dbfbf2f5e6dd DatabaseCreate -> dc=example,dc=net, 401a7084-eaf5-1039-866c-dbfbf2f5e6dd DatabaseIndexCreate -> objectClass eq, dc=example,dc=net DatabaseReindex -> dc=example,dc=net PluginMemberOfEnable PluginMemberOfScope -> dc=example,dc=net PluginMemberOfFixup -> dc=example,dc=net PluginUniqueConfigure -> dc=example,dc=net, mail 401a7084-eaf5-1039-866c-dbfbf2f5e6dd PluginUniqueConfigure -> dc=example,dc=net, uid 
401a7084-eaf5-1039-866c-dbfbf2f5e6dd DatabaseLdifImport -> dc=example,dc=com /export/tests/suites/openldap_2_389/../../data/openldap_2_389/1/example_com.slapcat.ldif DatabaseLdifImport -> dc=example,dc=net /export/tests/suites/openldap_2_389/../../data/openldap_2_389/1/example_net.slapcat.ldif ==== end migration plan ==== -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.migrate.openldap.config:config.py:264 Examining OpenLDAP Configuration ... [32mINFO [0m lib389.migrate.openldap.config:config.py:285 Completed OpenLDAP Configuration Parsing. [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 1 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 2 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 3 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 4 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 5 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 6 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 7 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 8 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 9 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 10 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 11 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 12 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 13 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 14 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 15 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 16 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 17 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 18 / 71 complete ... 
[32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 19 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 20 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 21 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 22 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 23 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 24 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 25 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 26 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 27 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 28 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 29 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 30 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 31 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 32 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 33 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 34 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 35 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 36 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 37 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 38 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 39 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 40 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 41 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 42 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 43 / 71 complete ... 
[32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 44 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 45 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 46 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 47 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 48 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 49 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 50 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 51 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 52 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 53 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 54 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 55 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 56 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 57 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 58 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 59 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 60 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 61 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 62 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 63 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 64 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 65 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 66 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 67 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 68 / 71 complete ... 
[32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 69 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 70 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 71 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 1 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 2 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 3 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 4 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 5 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 6 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 7 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 8 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 9 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 10 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 11 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 12 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 13 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 14 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 15 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 16 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 17 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 18 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 19 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 20 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 21 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 22 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 23 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 24 / 71 complete ... 
[32mINFO [0m lib389.migrate.plan:plan.py:663 post: 25 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 26 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 27 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 28 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 29 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 30 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 31 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 32 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 33 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 34 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 35 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 36 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 37 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 38 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 39 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 40 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 41 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 42 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 43 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 44 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 45 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 46 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 47 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 48 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 49 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 50 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 51 / 71 complete ... 
[31m[1mERROR [0m lib389:tasks.py:795 Error: index task index_all_11092020_212329 exited with -1 [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 52 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 53 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 54 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 55 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 56 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 57 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 58 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 59 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 60 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 61 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 62 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 63 / 71 complete ... [31m[1mERROR [0m lib389:tasks.py:795 Error: index task index_all_11092020_212331 exited with -1 [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 64 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 65 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 66 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 67 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 68 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 69 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 70 / 71 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 71 / 71 complete ... | |||
Passed | suites/openldap_2_389/migrate_test.py::test_migrate_openldap_slapdd_skip_elements | 14.89 | |
------------------------------Captured stdout call------------------------------ ==== migration plan ==== SchemaAttributeUnsupported -> ('otherMailbox',) SchemaAttributeUnsupported -> ('dSAQuality',) SchemaAttributeUnsupported -> ('singleLevelQuality',) SchemaAttributeUnsupported -> ('subtreeMinimumQuality',) SchemaAttributeUnsupported -> ('subtreeMaximumQuality',) SchemaAttributeCreate -> ('suseDefaultBase',) SchemaAttributeCreate -> ('suseNextUniqueId',) SchemaAttributeCreate -> ('suseMinUniqueId',) SchemaAttributeCreate -> ('suseMaxUniqueId',) SchemaAttributeCreate -> ('suseDefaultTemplate',) SchemaAttributeCreate -> ('suseSearchFilter',) SchemaAttributeCreate -> ('suseDefaultValue',) SchemaAttributeCreate -> ('suseNamingAttribute',) SchemaAttributeCreate -> ('suseSecondaryGroup',) SchemaAttributeCreate -> ('suseMinPasswordLength',) SchemaAttributeCreate -> ('suseMaxPasswordLength',) SchemaAttributeCreate -> ('susePasswordHash',) SchemaAttributeCreate -> ('suseSkelDir',) SchemaAttributeCreate -> ('susePlugin',) SchemaAttributeCreate -> ('suseMapAttribute',) SchemaAttributeCreate -> ('suseImapServer',) SchemaAttributeCreate -> ('suseImapAdmin',) SchemaAttributeCreate -> ('suseImapDefaultQuota',) SchemaAttributeCreate -> ('suseImapUseSsl',) SchemaClassUnsupported -> 0.9.2342.19200300.100.4.4 ('pilotPerson', 'newPilotPerson') may -> ('userid', 'textEncodedORAddress', 'rfc822Mailbox', 'favouriteDrink', 'roomNumber', 'userClass', 'homeTelephoneNumber', 'homePostalAddress', 'secretary', 'personalTitle', 'preferredDeliveryMethod', 'businessCategory', 'janetMailbox', 'otherMailbox', 'mobileTelephoneNumber', 'pagerTelephoneNumber', 'organizationalStatus', 'mailPreferenceOption', 'personalSignature') must -> () sup -> ('person',) SchemaClassInconsistent -> ( 0.9.2342.19200300.100.4.20 NAME 'pilotOrganization' SUP organization STRUCTURAL MAY buildingName X-ORIGIN 'user defined' ) to 0.9.2342.19200300.100.4.20 ('pilotOrganization',) may -> ('buildingName',) must -> () sup 
-> ('organization', 'organizationalUnit') SchemaClassUnsupported -> 0.9.2342.19200300.100.4.21 ('pilotDSA',) may -> ('dSAQuality',) must -> () sup -> ('dsa',) SchemaClassUnsupported -> 0.9.2342.19200300.100.4.22 ('qualityLabelledData',) may -> ('subtreeMinimumQuality', 'subtreeMaximumQuality') must -> ('dsaQuality',) sup -> ('top',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:2 ('suseModuleConfiguration',) may -> ('suseDefaultBase',) must -> ('cn',) sup -> ('top',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:3 ('suseUserConfiguration',) may -> ('suseMinPasswordLength', 'suseMaxPasswordLength', 'susePasswordHash', 'suseSkelDir', 'suseNextUniqueId', 'suseMinUniqueId', 'suseMaxUniqueId', 'suseDefaultTemplate', 'suseSearchFilter', 'suseMapAttribute') must -> () sup -> ('suseModuleConfiguration',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:4 ('suseObjectTemplate',) may -> ('susePlugin', 'suseDefaultValue', 'suseNamingAttribute') must -> ('cn',) sup -> ('top',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:5 ('suseUserTemplate',) may -> ('suseSecondaryGroup',) must -> ('cn',) sup -> ('suseObjectTemplate',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:6 ('suseGroupTemplate',) may -> () must -> ('cn',) sup -> ('suseObjectTemplate',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:7 ('suseGroupConfiguration',) may -> ('suseNextUniqueId', 'suseMinUniqueId', 'suseMaxUniqueId', 'suseDefaultTemplate', 'suseSearchFilter', 'suseMapAttribute') must -> () sup -> ('suseModuleConfiguration',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:8 ('suseCaConfiguration',) may -> () must -> () sup -> ('suseModuleConfiguration',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:9 ('suseDnsConfiguration',) may -> () must -> () sup -> ('suseModuleConfiguration',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:10 ('suseDhcpConfiguration',) may -> () must -> () sup -> ('suseModuleConfiguration',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:11 ('suseMailConfiguration',) may -> () must 
-> ('suseImapServer', 'suseImapAdmin', 'suseImapDefaultQuota', 'suseImapUseSsl') sup -> ('suseModuleConfiguration',) DatabaseReindex -> dc=example,dc=com PluginMemberOfEnable PluginMemberOfScope -> dc=example,dc=com PluginMemberOfFixup -> dc=example,dc=com PluginRefintEnable PluginRefintAttributes -> member PluginRefintAttributes -> memberOf PluginRefintScope -> dc=example,dc=com PluginUniqueConfigure -> dc=example,dc=com, mail 401a528e-eaf5-1039-8667-dbfbf2f5e6dd PluginUniqueConfigure -> dc=example,dc=com, uid 401a528e-eaf5-1039-8667-dbfbf2f5e6dd DatabaseReindex -> dc=example,dc=net PluginMemberOfEnable PluginMemberOfScope -> dc=example,dc=net PluginMemberOfFixup -> dc=example,dc=net PluginUniqueConfigure -> dc=example,dc=net, mail 401a7084-eaf5-1039-866c-dbfbf2f5e6dd PluginUniqueConfigure -> dc=example,dc=net, uid 401a7084-eaf5-1039-866c-dbfbf2f5e6dd DatabaseLdifImport -> dc=example,dc=com /export/tests/suites/openldap_2_389/../../data/openldap_2_389/1/example_com.slapcat.ldif ==== end migration plan ==== -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.migrate.openldap.config:config.py:264 Examining OpenLDAP Configuration ... [32mINFO [0m lib389.migrate.openldap.config:config.py:285 Completed OpenLDAP Configuration Parsing. [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 1 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 2 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 3 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 4 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 5 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 6 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 7 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 8 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 9 / 55 complete ... 
[32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 10 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 11 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 12 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 13 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 14 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 15 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 16 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 17 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 18 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 19 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 20 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 21 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 22 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 23 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 24 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 25 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 26 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 27 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 28 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 29 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 30 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 31 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 32 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 33 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 34 / 55 complete ... 
[32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 35 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 36 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 37 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 38 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 39 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 40 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 41 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 42 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 43 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 44 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 45 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 46 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 47 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 48 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 49 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 50 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 51 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 52 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 53 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 54 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:656 migration: 55 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 1 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 2 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 3 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 4 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 5 / 55 complete ... 
[32mINFO [0m lib389.migrate.plan:plan.py:663 post: 6 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 7 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 8 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 9 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 10 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 11 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 12 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 13 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 14 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 15 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 16 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 17 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 18 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 19 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 20 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 21 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 22 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 23 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 24 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 25 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 26 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 27 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 28 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 29 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 30 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 31 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 32 / 55 complete ... 
[32mINFO [0m lib389.migrate.plan:plan.py:663 post: 33 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 34 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 35 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 36 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 37 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 38 / 55 complete ... [31m[1mERROR [0m lib389:tasks.py:795 Error: index task index_all_11092020_212344 exited with -1 [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 39 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 40 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 41 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 42 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 43 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 44 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 45 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 46 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 47 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 48 / 55 complete ... [31m[1mERROR [0m lib389:tasks.py:795 Error: index task index_all_11092020_212346 exited with -1 [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 49 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 50 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 51 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 52 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 53 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 54 / 55 complete ... [32mINFO [0m lib389.migrate.plan:plan.py:663 post: 55 / 55 complete ... | |||
Passed | suites/openldap_2_389/password_migrate_test.py::test_migrate_openldap_password_hash | 0.61 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/paged_results/paged_results_test.py::test_search_success[6-5] | 0.90 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:56 Adding user simplepaged_test -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 5 users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:247 Set user bind simplepaged_test [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 6; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7faa551a1220>]. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:253 5 results [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 5 users | |||
Passed | suites/paged_results/paged_results_test.py::test_search_success[5-5] | 0.38 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 5 users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:247 Set user bind simplepaged_test [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 5; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7faa55833250>]. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:253 5 results [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 5 users | |||
Passed | suites/paged_results/paged_results_test.py::test_search_success[5-25] | 1.42 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 25 users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:247 Set user bind simplepaged_test [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 5; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7faa558201c0>]. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 1 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 2 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 3 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 4 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:253 25 results [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 25 users | |||
Passed | suites/paged_results/paged_results_test.py::test_search_limits_fail[50-200-cn=config,cn=ldbm database,cn=plugins,cn=config-nsslapd-idlistscanlimit-100-UNWILLING_TO_PERFORM] | 11.35 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 200 users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-idlistscanlimit to 100. Previous value - b'4000'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:299 Set user bind [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:302 Create simple paged results control instance [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:308 Initiate ldapsearch with created control instance [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:320 Getting page 0 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 200 users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-idlistscanlimit to b'4000'. Previous value - b'100'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. | |||
Passed | suites/paged_results/paged_results_test.py::test_search_limits_fail[5-15-cn=config-nsslapd-timelimit-20-UNAVAILABLE_CRITICAL_EXTENSION] | 31.20 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 15 users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-timelimit to 20. Previous value - b'3600'. Modified suffix - cn=config. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:299 Set user bind [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:302 Create simple paged results control instance [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:308 Initiate ldapsearch with created control instance [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:320 Getting page 0 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 15 users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-timelimit to b'3600'. Previous value - b'20'. Modified suffix - cn=config. | |||
Passed | suites/paged_results/paged_results_test.py::test_search_limits_fail[21-50-cn=config-nsslapd-sizelimit-20-SIZELIMIT_EXCEEDED] | 3.07 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 50 users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-sizelimit to 20. Previous value - b'2000'. Modified suffix - cn=config. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:299 Set user bind [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:302 Create simple paged results control instance [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:308 Initiate ldapsearch with created control instance [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:320 Getting page 0 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 50 users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-sizelimit to b'2000'. Previous value - b'20'. Modified suffix - cn=config. | |||
Passed | suites/paged_results/paged_results_test.py::test_search_limits_fail[21-50-cn=config-nsslapd-pagedsizelimit-5-SIZELIMIT_EXCEEDED] | 3.06 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 50 users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-pagedsizelimit to 5. Previous value - b'0'. Modified suffix - cn=config. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:299 Set user bind [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:302 Create simple paged results control instance [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:308 Initiate ldapsearch with created control instance [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:320 Getting page 0 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 50 users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-pagedsizelimit to b'0'. Previous value - b'5'. Modified suffix - cn=config. | |||
Passed | suites/paged_results/paged_results_test.py::test_search_limits_fail[5-50-cn=config,cn=ldbm database,cn=plugins,cn=config-nsslapd-lookthroughlimit-20-ADMINLIMIT_EXCEEDED] | 3.39 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 50 users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-lookthroughlimit to 20. Previous value - b'5000'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:299 Set user bind [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:302 Create simple paged results control instance [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:308 Initiate ldapsearch with created control instance [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:320 Getting page 0 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 50 users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-lookthroughlimit to b'5000'. Previous value - b'20'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. | |||
Passed | suites/paged_results/paged_results_test.py::test_search_sort_success | 2.81 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 50 users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:378 Initiate ldapsearch with created control instance [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:379 Collect data with sorting [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 5; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7faa46367850>, <lib389._controls.SSSRequestControl object at 0x7faa463678b0>]. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 1 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 2 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 3 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 4 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 5 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 6 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 7 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 8 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 9 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:384 Substring numbers from user DNs [32mINFO [0m 
tests.suites.paged_results.paged_results_test:paged_results_test.py:388 Assert that list is sorted [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 50 users | |||
Passed | suites/paged_results/paged_results_test.py::test_search_abandon | 5.72 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 10 users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:419 Set user bind [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:422 Create simple paged results control instance [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:426 Initiate a search with a paged results control [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:429 Abandon the search [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:432 Expect an ldap.TIMEOUT exception, while trying to get the search results [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 10 users | |||
Passed | suites/paged_results/paged_results_test.py::test_search_with_timelimit | 36.19 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 100 users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:468 Set user bind [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:471 Create simple paged results control instance [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:476 Iteration 0 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:483 Getting page 0 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:483 Getting page 1 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:499 Done with this search - sleeping 10 seconds [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:476 Iteration 1 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:483 Getting page 0 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:483 Getting page 1 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:499 Done with this search - sleeping 10 seconds [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:476 Iteration 2 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:483 Getting page 0 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:483 Getting page 1 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:499 Done with this search - sleeping 10 seconds [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 100 users | |||
Passed | suites/paged_results/paged_results_test.py::test_search_dns_ip_aci[fqdn] | 6.26 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 100 users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:547 Back up current suffix ACI [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:550 Add test ACI [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:556 Set user bind [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:559 Create simple paged results control instance [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:563 Initiate three searches with a paged results control [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:565 1 search [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 5; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7faa45ad7670>]. 
[32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 1 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 2 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 3 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 4 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 5 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 6 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 7 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 8 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 9 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 10 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 11 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 12 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 13 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 14 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 15 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 16 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 17 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 18 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 
Getting page 19 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:568 100 results [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:565 2 search [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 5; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7faa45ad7670>]. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 1 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 2 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 3 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 4 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 5 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 6 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 7 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 8 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 9 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 10 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 11 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 12 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 13 [32mINFO [0m 
tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 14 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 15 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 16 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 17 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 18 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 19 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:568 100 results [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:565 3 search [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 5; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7faa45ad7670>]. 
[32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 1 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 2 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 3 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 4 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 5 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 6 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 7 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 8 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 9 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 10 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 11 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 12 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 13 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 14 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 15 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 16 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 17 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 18 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 
Getting page 19 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:568 100 results [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:570 If we are here, then no error has happened. We are good. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:573 Restore ACI [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 100 users | |||
Passed | suites/paged_results/paged_results_test.py::test_search_dns_ip_aci[ip] | 6.66 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 100 users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:547 Back up current suffix ACI [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:550 Add test ACI [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:556 Set user bind [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:559 Create simple paged results control instance [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:563 Initiate three searches with a paged results control [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:565 1 search [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 5; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7faa550e4ca0>]. 
[32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 1 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 2 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 3 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 4 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 5 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 6 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 7 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 8 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 9 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 10 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 11 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 12 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 13 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 14 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 15 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 16 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 17 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 18 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 
Getting page 19 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:568 100 results [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:565 2 search [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 5; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7faa550e4ca0>]. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 1 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 2 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 3 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 4 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 5 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 6 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 7 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 8 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 9 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 10 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 11 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 12 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 13 [32mINFO [0m 
tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 14 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 15 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 16 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 17 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 18 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 19 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:568 100 results [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:565 3 search [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 5; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7faa550e4ca0>]. 
[32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 1 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 2 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 3 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 4 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 5 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 6 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 7 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 8 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 9 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 10 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 11 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 12 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 13 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 14 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 15 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 16 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 17 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 18 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 
Getting page 19 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:568 100 results [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:570 If we are here, then no error has happened. We are good. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:573 Restore ACI [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 100 users | |||
Passed | suites/paged_results/paged_results_test.py::test_search_multiple_paging | 6.23 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 100 users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:606 Set user bind [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:609 Create simple paged results control instance [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:614 Iteration 0 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:614 Iteration 1 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:614 Iteration 2 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 100 users | |||
Passed | suites/paged_results/paged_results_test.py::test_search_invalid_cookie[1000] | 5.74 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 100 users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:661 Set user bind [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:664 Create simple paged results control instance [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:672 Put an invalid cookie (1000) to the control. TypeError is expected [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 100 users | |||
Passed | suites/paged_results/paged_results_test.py::test_search_invalid_cookie[-1] | 6.06 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 100 users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:661 Set user bind [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:664 Create simple paged results control instance [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:672 Put an invalid cookie (-1) to the control. TypeError is expected [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 100 users | |||
Passed | suites/paged_results/paged_results_test.py::test_search_abandon_with_zero_size | 0.63 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 10 users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:705 Set user bind [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:708 Create simple paged results control instance [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 10 users | |||
Passed | suites/paged_results/paged_results_test.py::test_search_pagedsizelimit_success | 0.66 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-pagedsizelimit to 20. Previous value - b'0'. Modified suffix - cn=config. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 10 users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:754 Set user bind [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 10; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7faa4637a460>]. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:762 10 results [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 10 users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-pagedsizelimit to b'0'. Previous value - b'20'. Modified suffix - cn=config. | |||
Passed | suites/paged_results/paged_results_test.py::test_search_nspagedsizelimit[5-15-PASS] | 0.92 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 10 users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-pagedsizelimit to 5. Previous value - b'0'. Modified suffix - cn=config. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsPagedSizeLimit to 15. Previous value - None. Modified suffix - uid=simplepaged_test,ou=People,dc=example,dc=com. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:817 Set user bind [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:828 Expect to pass [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 10; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7faa4636ddf0>]. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:830 10 results [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 10 users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-pagedsizelimit to b'0'. Previous value - b'5'. Modified suffix - cn=config. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsPagedSizeLimit to None. Previous value - b'15'. Modified suffix - uid=simplepaged_test,ou=People,dc=example,dc=com. | |||
Passed | suites/paged_results/paged_results_test.py::test_search_nspagedsizelimit[15-5-SIZELIMIT_EXCEEDED] | 0.69 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 10 users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-pagedsizelimit to 15. Previous value - b'0'. Modified suffix - cn=config. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsPagedSizeLimit to 5. Previous value - None. Modified suffix - uid=simplepaged_test,ou=People,dc=example,dc=com. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:817 Set user bind [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:824 Expect to fail with SIZELIMIT_EXCEEDED [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 10; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7faa463f9400>]. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 10 users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-pagedsizelimit to b'0'. Previous value - b'15'. Modified suffix - cn=config. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsPagedSizeLimit to None. Previous value - b'5'. Modified suffix - uid=simplepaged_test,ou=People,dc=example,dc=com. | |||
Passed | suites/paged_results/paged_results_test.py::test_search_paged_limits[conf_attr_values0-ADMINLIMIT_EXCEEDED] | 6.33 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 101 users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-sizelimit to 5000. Previous value - b'2000'. Modified suffix - cn=config. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-pagedsizelimit to 5000. Previous value - b'0'. Modified suffix - cn=config. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-idlistscanlimit to 100. Previous value - b'4000'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-lookthroughlimit to 100. Previous value - b'5000'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:889 Set user bind [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:896 Expect to fail with ADMINLIMIT_EXCEEDED [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 10; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7faa550e4b20>]. 
[32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 1 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 2 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 3 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 4 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 5 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 6 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 7 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 101 users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-sizelimit to b'2000'. Previous value - b'5000'. Modified suffix - cn=config. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-pagedsizelimit to b'0'. Previous value - b'5000'. Modified suffix - cn=config. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-lookthroughlimit to b'5000'. Previous value - b'100'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-idlistscanlimit to b'4000'. Previous value - b'100'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. | |||
Passed | suites/paged_results/paged_results_test.py::test_search_paged_user_limits[conf_attr_values0-ADMINLIMIT_EXCEEDED] | 6.11 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 101 users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-lookthroughlimit to 1000. Previous value - b'5000'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-idlistscanlimit to 1000. Previous value - b'4000'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsPagedIDListScanLimit to 100. Previous value - None. Modified suffix - uid=simplepaged_test,ou=People,dc=example,dc=com. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsPagedLookthroughLimit to 100. Previous value - None. Modified suffix - uid=simplepaged_test,ou=People,dc=example,dc=com. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:963 Set user bind [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:970 Expect to fail with ADMINLIMIT_EXCEEDED [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 10; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7faa45b75b80>]. 
[32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 1 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 2 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 3 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 4 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 5 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 6 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 7 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 101 users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-lookthroughlimit to b'5000'. Previous value - b'1000'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-idlistscanlimit to b'4000'. Previous value - b'1000'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsPagedIDListScanLimit to None. Previous value - b'100'. Modified suffix - uid=simplepaged_test,ou=People,dc=example,dc=com. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsPagedLookthroughLimit to None. Previous value - b'100'. Modified suffix - uid=simplepaged_test,ou=People,dc=example,dc=com. | |||
Passed | suites/paged_results/paged_results_test.py::test_ger_basic | 1.44 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 20 users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 4; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7faa4604c6d0>, <ldap.controls.simple.GetEffectiveRightsControl object at 0x7faa45afba60>]. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 1 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 2 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 3 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 4 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 5 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:1014 20 results [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:1016 Check for attributeLevelRights [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:1019 Remove added users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 20 users | |||
Passed | suites/paged_results/paged_results_test.py::test_multi_suffix_search | 10.29 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:87 Adding suffix:o=test_parent and backend: parent_base [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:96 Adding ACI to allow our test user to search [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:108 Adding suffix:ou=child,o=test_parent and backend: child_base -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:1050 Clear the access log [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 10 users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 10 users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: o=test_parent; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 4; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7faa5520a580>]. 
[32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 1 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 2 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 3 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 4 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 5 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:1061 20 results [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:1064 Restart the server to flush the logs [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:1070 Assert that last pr_cookie == -1 and others pr_cookie == 0 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:1075 Remove added users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 10 users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 10 users | |||
Passed | suites/paged_results/paged_results_test.py::test_maxsimplepaged_per_conn_success[None] | 1.59 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 20 users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:1108 Set user bind [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 4; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7faa46367940>]. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 1 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 2 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 3 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 4 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:1115 20 results [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:1118 Remove added users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 20 users | |||
Passed | suites/paged_results/paged_results_test.py::test_maxsimplepaged_per_conn_success[-1] | 1.95 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 20 users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-maxsimplepaged-per-conn to -1. Previous value - b'-1'. Modified suffix - cn=config. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:1108 Set user bind [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 4; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7faa463539a0>]. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 1 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 2 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 3 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 4 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:1115 20 results [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:1118 Remove added users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 20 users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-maxsimplepaged-per-conn to b'-1'. Previous value - b'-1'. Modified suffix - cn=config. | |||
Passed | suites/paged_results/paged_results_test.py::test_maxsimplepaged_per_conn_success[1000] | 1.70 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 20 users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-maxsimplepaged-per-conn to 1000. Previous value - b'-1'. Modified suffix - cn=config. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:1108 Set user bind [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 4; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7faa46358b50>]. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 1 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 2 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 3 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 4 [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:1115 20 results [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:1118 Remove added users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 20 users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-maxsimplepaged-per-conn to b'-1'. Previous value - b'1000'. Modified suffix - cn=config. | |||
Passed | suites/paged_results/paged_results_test.py::test_maxsimplepaged_per_conn_failure[0] | 1.80 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 20 users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-maxsimplepaged-per-conn to 0. Previous value - b'-1'. Modified suffix - cn=config. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:1153 Set user bind [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:1156 Create simple paged results control instance [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:1171 Remove added users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 20 users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-maxsimplepaged-per-conn to b'-1'. Previous value - b'0'. Modified suffix - cn=config. | |||
Passed | suites/paged_results/paged_results_test.py::test_maxsimplepaged_per_conn_failure[1] | 1.66 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 20 users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-maxsimplepaged-per-conn to 1. Previous value - b'-1'. Modified suffix - cn=config. [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:1153 Set user bind [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:1156 Create simple paged results control instance [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:1171 Remove added users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 20 users [32mINFO [0m tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-maxsimplepaged-per-conn to b'-1'. Previous value - b'1'. Modified suffix - cn=config. | |||
Passed | suites/password/password_policy_test.py::test_password_change_section | 1.78 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/password/password_policy_test.py::test_password_syntax_section | 3.90 | |
No log output captured. | |||
Passed | suites/password/password_policy_test.py::test_password_history_section | 8.83 | |
No log output captured. | |||
Passed | suites/password/password_policy_test.py::test_password_minimum_age_section | 11.32 | |
No log output captured. | |||
Passed | suites/password/password_policy_test.py::test_account_lockout_and_lockout_duration_section | 6.48 | |
No log output captured. | |||
Passed | suites/password/password_policy_test.py::test_grace_limit_section | 18.27 | |
No log output captured. | |||
Passed | suites/password/password_policy_test.py::test_additional_corner_cases | 1.30 | |
No log output captured. | |||
Passed | suites/password/password_test.py::test_password_delete_specific_password | 0.08 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.password_test:password_test.py:44 Running test_password_delete_specific_password... [32mINFO [0m tests.suites.password.password_test:password_test.py:65 test_password_delete_specific_password: PASSED | |||
Passed | suites/password/pbkdf2_upgrade_plugin_test.py::test_pbkdf2_upgrade | 9.95 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/password/pwdAdmin_test.py::test_pwdAdmin_bypass | 0.22 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m tests.suites.password.pwdAdmin_test:pwdAdmin_test.py:46 test_pwdAdmin_init: Creating Password Administrator entries... [32mINFO [0m tests.suites.password.pwdAdmin_test:pwdAdmin_test.py:85 test_pwdAdmin_init: Configuring password policy... [32mINFO [0m tests.suites.password.pwdAdmin_test:pwdAdmin_test.py:100 Add aci to allow password admin to add/update entries... [32mINFO [0m tests.suites.password.pwdAdmin_test:pwdAdmin_test.py:115 test_pwdAdmin_init: Bind as the Password Administrator (before activating)... [32mINFO [0m tests.suites.password.pwdAdmin_test:pwdAdmin_test.py:128 test_pwdAdmin_init: Attempt to add entries with invalid passwords, these adds should fail... -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.pwdAdmin_test:pwdAdmin_test.py:167 test_pwdAdmin: Activate the Password Administator... | |||
Passed | suites/password/pwdAdmin_test.py::test_pwdAdmin_no_admin | 0.12 | |
No log output captured. | |||
Passed | suites/password/pwdAdmin_test.py::test_pwdAdmin_modify | 0.18 | |
No log output captured. | |||
Passed | suites/password/pwdAdmin_test.py::test_pwdAdmin_group | 0.17 | |
No log output captured. | |||
Passed | suites/password/pwdAdmin_test.py::test_pwdAdmin_config_validation | 0.02 | |
No log output captured. | |||
Passed | suites/password/pwdModify_test.py::test_pwd_modify_with_different_operation | 13.65 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.pwdModify_test:pwdModify_test.py:101 Attempt for Password change for an entry that does not exists [32mINFO [0m tests.suites.password.pwdModify_test:pwdModify_test.py:103 Attempt for Password change for an entry that exists [32mINFO [0m tests.suites.password.pwdModify_test:pwdModify_test.py:105 Attempt for Password change to old for an entry that exists [32mINFO [0m tests.suites.password.pwdModify_test:pwdModify_test.py:107 Attempt for Password Change with Binddn as testuser but with wrong old password [32mINFO [0m tests.suites.password.pwdModify_test:pwdModify_test.py:111 Attempt for Password Change with Binddn as testuser [32mINFO [0m tests.suites.password.pwdModify_test:pwdModify_test.py:113 Attempt for Password Change without giving newpassword [32mINFO [0m tests.suites.password.pwdModify_test:pwdModify_test.py:116 Change password to NEW_PASSWD i.e newpassword [32mINFO [0m tests.suites.password.pwdModify_test:pwdModify_test.py:119 Check binding with old/new password [32mINFO [0m tests.suites.password.pwdModify_test:pwdModify_test.py:124 Change password back to OLD_PASSWD i.e password [32mINFO [0m tests.suites.password.pwdModify_test:pwdModify_test.py:127 Checking password change Operation using a Non-Secure connection [32mINFO [0m tests.suites.password.pwdModify_test:pwdModify_test.py:131 Testuser attempts to change password for testuser2(userPassword attribute is Set) [32mINFO [0m 
tests.suites.password.pwdModify_test:pwdModify_test.py:147 Directory Manager attempts to change password for testuser2(userPassword attribute is Set) [32mINFO [0m tests.suites.password.pwdModify_test:pwdModify_test.py:150 Changing userPassword attribute to Undefined for testuser2 [32mINFO [0m tests.suites.password.pwdModify_test:pwdModify_test.py:152 Testuser attempts to change password for testuser2(userPassword attribute is Undefined) [32mINFO [0m tests.suites.password.pwdModify_test:pwdModify_test.py:156 Directory Manager attempts to change password for testuser2(userPassword attribute is Undefined) [32mINFO [0m tests.suites.password.pwdModify_test:pwdModify_test.py:159 Create a password syntax policy. Attempt to change to password that violates that policy [32mINFO [0m tests.suites.password.pwdModify_test:pwdModify_test.py:163 Reset password syntax policy [32mINFO [0m tests.suites.password.pwdModify_test:pwdModify_test.py:165 userPassword mod with control results in ber decode error [32mINFO [0m tests.suites.password.pwdModify_test:pwdModify_test.py:169 Reseting the testuser's password | |||
Passed | suites/password/pwdModify_test.py::test_pwd_modify_with_password_policy | 0.16 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m tests.suites.password.pwdModify_test:pwdModify_test.py:44 Change the pwd storage type to clear and change the password once to refresh it(for the rest of tests -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.pwdModify_test:pwdModify_test.py:192 Change the password and check that a new entry has been added to the history [32mINFO [0m tests.suites.password.pwdModify_test:pwdModify_test.py:196 Try changing password to one stored in history. Should fail [32mINFO [0m tests.suites.password.pwdModify_test:pwdModify_test.py:199 Change the password several times in a row, and try binding after each change | |||
Passed | suites/password/pwdModify_test.py::test_pwd_modify_with_subsuffix | 0.13 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.pwdModify_test:pwdModify_test.py:228 Add a new SubSuffix [32mINFO [0m tests.suites.password.pwdModify_test:pwdModify_test.py:235 Add the container & create password policies [32mINFO [0m tests.suites.password.pwdModify_test:pwdModify_test.py:243 Add two New users under the SubEntry [32mINFO [0m tests.suites.password.pwdModify_test:pwdModify_test.py:265 Changing password of uid=test_user0,ou=TestPeople_bug834047,dc=example,dc=com to newpassword [32mINFO [0m tests.suites.password.pwdModify_test:pwdModify_test.py:270 Try to delete password- case when password is specified [32mINFO [0m tests.suites.password.pwdModify_test:pwdModify_test.py:274 Try to delete password- case when password is not specified | |||
Passed | suites/password/pwdPolicy_attribute_test.py::test_pwd_reset | 1.72 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:39 Adding test user {} | |||
Passed | suites/password/pwdPolicy_attribute_test.py::test_change_pwd[on-off-UNWILLING_TO_PERFORM] | 2.21 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:66 Create password policy for subtree ou=people,dc=example,dc=com [32mINFO [0m tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:69 Create password policy for user uid=simplepaged_test,ou=people,dc=example,dc=com -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:154 Set passwordChange to "on" - ou=people,dc=example,dc=com [32mINFO [0m tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:161 Set passwordChange to "off" - uid=simplepaged_test,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:170 Bind as user and modify userPassword [32mINFO [0m tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:182 Bind as DM | |||
Passed | suites/password/pwdPolicy_attribute_test.py::test_change_pwd[off-off-UNWILLING_TO_PERFORM] | 2.12 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:154 Set passwordChange to "off" - ou=people,dc=example,dc=com [32mINFO [0m tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:161 Set passwordChange to "off" - uid=simplepaged_test,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:170 Bind as user and modify userPassword [32mINFO [0m tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:182 Bind as DM | |||
Passed | suites/password/pwdPolicy_attribute_test.py::test_change_pwd[off-on-False] | 2.14 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:154 Set passwordChange to "off" - ou=people,dc=example,dc=com [32mINFO [0m tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:161 Set passwordChange to "on" - uid=simplepaged_test,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:170 Bind as user and modify userPassword [32mINFO [0m tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:182 Bind as DM | |||
Passed | suites/password/pwdPolicy_attribute_test.py::test_change_pwd[on-on-False] | 2.16 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:154 Set passwordChange to "on" - ou=people,dc=example,dc=com [32mINFO [0m tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:161 Set passwordChange to "on" - uid=simplepaged_test,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:170 Bind as user and modify userPassword [32mINFO [0m tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:182 Bind as DM | |||
Passed | suites/password/pwdPolicy_attribute_test.py::test_pwd_min_age | 14.32 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:219 Set passwordminage to "10" - ou=people,dc=example,dc=com [32mINFO [0m tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:224 Set passwordminage to "10" - uid=simplepaged_test,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:228 Set passwordminage to "10" - cn=config [32mINFO [0m tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:233 Bind as user and modify userPassword [32mINFO [0m tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:239 Bind as user and modify userPassword straight away after previous change [32mINFO [0m tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:244 Wait 12 second [32mINFO [0m tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:248 Bind as user and modify userPassword [32mINFO [0m tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:256 Bind as DM | |||
Passed | suites/password/pwdPolicy_controls_test.py::test_pwd_must_change | 2.21 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.pwdPolicy_controls_test:pwdPolicy_controls_test.py:109 Configure password policy with paswordMustChange set to "on" [32mINFO [0m tests.suites.password.pwdPolicy_controls_test:pwdPolicy_controls_test.py:120 Reset userpassword as Directory Manager [32mINFO [0m tests.suites.password.pwdPolicy_controls_test:pwdPolicy_controls_test.py:125 Bind should return ctrl with error code 2 (changeAfterReset) | |||
Passed | suites/password/pwdPolicy_controls_test.py::test_pwd_expired_grace_limit | 6.38 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.pwdPolicy_controls_test:pwdPolicy_controls_test.py:159 Configure password policy with grace limit set tot 2 [32mINFO [0m tests.suites.password.pwdPolicy_controls_test:pwdPolicy_controls_test.py:164 Change password and wait for it to expire [32mINFO [0m tests.suites.password.pwdPolicy_controls_test:pwdPolicy_controls_test.py:168 Bind and use up one grace login (only one left) [32mINFO [0m tests.suites.password.pwdPolicy_controls_test:pwdPolicy_controls_test.py:178 Use up last grace login, should get control [32mINFO [0m tests.suites.password.pwdPolicy_controls_test:pwdPolicy_controls_test.py:184 No grace login available, bind should fail, and no control should be returned | |||
Passed | suites/password/pwdPolicy_controls_test.py::test_pwd_expiring_with_warning | 5.51 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.pwdPolicy_controls_test:pwdPolicy_controls_test.py:206 Configure password policy [32mINFO [0m tests.suites.password.pwdPolicy_controls_test:pwdPolicy_controls_test.py:211 Change password and get controls [32mINFO [0m tests.suites.password.pwdPolicy_controls_test:pwdPolicy_controls_test.py:222 Warning has been sent, try the bind again, and recheck the expiring time | |||
Passed | suites/password/pwdPolicy_controls_test.py::test_pwd_expiring_with_no_warning | 6.37 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.pwdPolicy_controls_test:pwdPolicy_controls_test.py:250 Configure password policy [32mINFO [0m tests.suites.password.pwdPolicy_controls_test:pwdPolicy_controls_test.py:255 When the warning is less than the max age, we never send expiring control response [32mINFO [0m tests.suites.password.pwdPolicy_controls_test:pwdPolicy_controls_test.py:262 Turn on sending expiring control regardless of warning [32mINFO [0m tests.suites.password.pwdPolicy_controls_test:pwdPolicy_controls_test.py:274 Check expiring time again [32mINFO [0m tests.suites.password.pwdPolicy_controls_test:pwdPolicy_controls_test.py:285 Turn off sending expiring control (restore the default setting) | |||
Passed | suites/password/pwdPolicy_inherit_global_test.py::test_entry_has_no_restrictions[off-off] | 1.10 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:39 Adding user uid=buser,ou=People,dc=example,dc=com [32mINFO [0m tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:46 Adding an aci for the bind user [32mINFO [0m tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:71 Enable fine-grained policy [32mINFO [0m tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:92 Default value of nsslapd-pwpolicy-inherit-global is off [32mINFO [0m tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:92 Default value of passwordCheckSyntax is off -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:119 Set nsslapd-pwpolicy-inherit-global to off [32mINFO [0m tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:120 Set passwordCheckSyntax to off [32mINFO [0m tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:92 Default value of nsslapd-pwpolicy-inherit-global is off [32mINFO [0m tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:92 Default value of passwordCheckSyntax is off [32mINFO [0m tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:129 Bind as test user [32mINFO [0m 
tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:132 Make sure an entry added to ou=people has no password syntax restrictions. | |||
Passed | suites/password/pwdPolicy_inherit_global_test.py::test_entry_has_no_restrictions[on-off] | 1.08 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:119 Set nsslapd-pwpolicy-inherit-global to on [32mINFO [0m tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:120 Set passwordCheckSyntax to off [32mINFO [0m tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:92 Default value of nsslapd-pwpolicy-inherit-global is on [32mINFO [0m tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:92 Default value of passwordCheckSyntax is off [32mINFO [0m tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:129 Bind as test user [32mINFO [0m tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:132 Make sure an entry added to ou=people has no password syntax restrictions. | |||
Passed | suites/password/pwdPolicy_inherit_global_test.py::test_entry_has_no_restrictions[off-on] | 1.07 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:119 Set nsslapd-pwpolicy-inherit-global to off [32mINFO [0m tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:120 Set passwordCheckSyntax to on [32mINFO [0m tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:92 Default value of nsslapd-pwpolicy-inherit-global is off [32mINFO [0m tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:92 Default value of passwordCheckSyntax is on [32mINFO [0m tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:129 Bind as test user [32mINFO [0m tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:132 Make sure an entry added to ou=people has no password syntax restrictions. | |||
Passed | suites/password/pwdPolicy_inherit_global_test.py::test_entry_has_restrictions | 1.07 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:173 Set nsslapd-pwpolicy-inherit-global to on [32mINFO [0m tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:174 Set passwordCheckSyntax to on [32mINFO [0m tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:92 Default value of nsslapd-pwpolicy-inherit-global is on [32mINFO [0m tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:92 Default value of passwordCheckSyntax is on [32mINFO [0m tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:187 Bind as test user [32mINFO [0m tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:192 Try to add user with a short password (<9) [32mINFO [0m tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:197 Try to add user with a long password (>9) [32mINFO [0m tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:201 Bind as DM user | |||
Passed | suites/password/pwdPolicy_syntax_test.py::test_basic | 6.08 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:34 Enable global password policy. Check for syntax. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordMinLength: length too short [32mINFO [0m tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordMinDigits: does not contain minimum number of digits [32mINFO [0m tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordMinAlphas: does not contain minimum number of alphas [32mINFO [0m tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordMaxRepeats: too many repeating characters [32mINFO [0m tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordMinSpecials: does not contain minimum number of special characters [32mINFO [0m tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordMinLowers: does not contain minimum number of lowercase characters [32mINFO [0m tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordMinUppers: does not contain minimum number of lowercase characters [32mINFO [0m 
tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordDictCheck: Password found in dictionary [32mINFO [0m tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordPalindrome: Password is palindrome [32mINFO [0m tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordMaxSequence: Max monotonic sequence is not allowed [32mINFO [0m tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordMaxSequence: Max monotonic sequence is not allowed [32mINFO [0m tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordMaxSequence: Max monotonic sequence is not allowed [32mINFO [0m tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordMaxSequence: Max monotonic sequence is not allowed [32mINFO [0m tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordMaxSeqSets: Max monotonic sequence is not allowed [32mINFO [0m tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordMaxClassChars: Too may consecutive characters from the same class [32mINFO [0m tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordMaxClassChars: Too may consecutive characters from the same class [32mINFO [0m tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordMaxClassChars: Too may consecutive characters from the same class [32mINFO [0m tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordMaxClassChars: Too may 
consecutive characters from the same class [32mINFO [0m tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordBadWords: Too may consecutive characters from the same class [32mINFO [0m tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordUserAttributes: Password found in user entry | |||
Passed | suites/password/pwdPolicy_syntax_test.py::test_config_set_few_user_attributes | 6.69 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:pwdPolicy_syntax_test.py:315 Set passwordUserAttributes to "description loginShell" [32mINFO [0m lib389:pwdPolicy_syntax_test.py:319 Verify passwordUserAttributes has the values [32mINFO [0m lib389:pwdPolicy_syntax_test.py:323 Reset passwordUserAttributes [32mINFO [0m lib389:pwdPolicy_syntax_test.py:326 Verify passwordUserAttributes enforced the policy [32mINFO [0m tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordUserAttributes: Password found in user entry [32mINFO [0m tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordUserAttributes: Password found in user entry [32mINFO [0m tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordUserAttributes: Password found in user entry [32mINFO [0m tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordUserAttributes: Password found in user entry [32mINFO [0m tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordUserAttributes: Password found in user entry [32mINFO [0m tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordUserAttributes: Password found in user entry [32mINFO [0m tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordUserAttributes: Password found in user entry [32mINFO [0m tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordUserAttributes: Password found in user entry [32mINFO [0m tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly 
rejected by passwordUserAttributes: Password found in user entry | |||
Passed | suites/password/pwdPolicy_syntax_test.py::test_config_set_few_bad_words | 7.45 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:pwdPolicy_syntax_test.py:355 Set passwordBadWords to "fedora redhat" [32mINFO [0m lib389:pwdPolicy_syntax_test.py:360 Verify passwordBadWords has the values [32mINFO [0m lib389:pwdPolicy_syntax_test.py:364 Reset passwordBadWords [32mINFO [0m lib389:pwdPolicy_syntax_test.py:367 Verify passwordBadWords enforced the policy [32mINFO [0m tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordBadWords: Too may consecutive characters from the same class [32mINFO [0m tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordBadWords: Too may consecutive characters from the same class [32mINFO [0m tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordBadWords: Too may consecutive characters from the same class [32mINFO [0m tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordBadWords: Too may consecutive characters from the same class [32mINFO [0m tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordBadWords: Too may consecutive characters from the same class [32mINFO [0m tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordBadWords: Too may consecutive characters from the same class [32mINFO [0m tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordBadWords: Too may consecutive characters from the same class [32mINFO [0m tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordBadWords: Too may consecutive characters from the same class [32mINFO [0m 
tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordBadWords: Too may consecutive characters from the same class | |||
Passed | suites/password/pwdPolicy_token_test.py::test_token_lengths | 3.42 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.pwdPolicy_token_test:pwdPolicy_token_test.py:67 Testing password len 4 token (test) [32mINFO [0m tests.suites.password.pwdPolicy_token_test:pwdPolicy_token_test.py:72 Password correctly rejected: {'msgtype': 103, 'msgid': 12, 'result': 19, 'desc': 'Constraint violation', 'ctrls': [], 'info': 'invalid password syntax - password based off of user entry'} [32mINFO [0m tests.suites.password.pwdPolicy_token_test:pwdPolicy_token_test.py:67 Testing password len 6 token (test_u) [32mINFO [0m tests.suites.password.pwdPolicy_token_test:pwdPolicy_token_test.py:72 Password correctly rejected: {'msgtype': 103, 'msgid': 16, 'result': 19, 'desc': 'Constraint violation', 'ctrls': [], 'info': 'invalid password syntax - password based off of user entry'} [32mINFO [0m tests.suites.password.pwdPolicy_token_test:pwdPolicy_token_test.py:67 Testing password len 10 token (test_user1) [32mINFO [0m tests.suites.password.pwdPolicy_token_test:pwdPolicy_token_test.py:72 Password correctly rejected: {'msgtype': 103, 'msgid': 20, 'result': 19, 'desc': 'Constraint violation', 'ctrls': [], 'info': 'invalid password syntax - password based off of user entry'} | |||
Passed | suites/password/pwdPolicy_warning_test.py::test_different_values[ ] | 0.26 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:205 Get the default value [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:209 An invalid value is being tested [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:171 Setting passwordSendExpiringTime to [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:213 Now check the value is unchanged [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:216 Invalid value was rejected correctly | |||
Passed | suites/password/pwdPolicy_warning_test.py::test_different_values[junk123] | 0.01 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:205 Get the default value [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:209 An invalid value is being tested [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:171 Setting passwordSendExpiringTime to junk123 [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:213 Now check the value is unchanged [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:216 Invalid value junk123 was rejected correctly | |||
Passed | suites/password/pwdPolicy_warning_test.py::test_different_values[on] | 1.28 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:205 Get the default value [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:218 A valid value is being tested [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:171 Setting passwordSendExpiringTime to on [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:221 Now check that the value has been changed [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:224 passwordSendExpiringTime is now set to on [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:226 Set passwordSendExpiringTime back to the default value [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:171 Setting passwordSendExpiringTime to off | |||
Passed | suites/password/pwdPolicy_warning_test.py::test_different_values[off] | 1.03 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:205 Get the default value [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:218 A valid value is being tested [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:171 Setting passwordSendExpiringTime to off [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:221 Now check that the value has been changed [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:224 passwordSendExpiringTime is now set to off [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:226 Set passwordSendExpiringTime back to the default value [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:171 Setting passwordSendExpiringTime to off | |||
Passed | suites/password/pwdPolicy_warning_test.py::test_expiry_time | 0.05 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:46 Get the default values [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:53 Set the new values [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:113 Add the user -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:257 Get the password expiry warning time [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:258 Binding with (uid=tuser,ou=people,dc=example,dc=com) and requesting the password expiry warning time [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:158 Bind with the user and request the password expiry warning time [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:262 Check whether the time is returned [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:265 user's password will expire in 172800 seconds [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:268 Rebinding as DM | |||
Passed | suites/password/pwdPolicy_warning_test.py::test_password_warning[passwordSendExpiringTime-off] | 0.56 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:46 Get the default values [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:53 Set the new values [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:113 Add the user -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:300 Set configuration parameter [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:171 Setting passwordSendExpiringTime to off [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:303 Binding with (uid=tuser,ou=people,dc=example,dc=com) and requesting password expiry warning time [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:158 Bind with the user and request the password expiry warning time [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:307 Check the state of the control [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:309 Password Expiry warning time is not returned as passwordSendExpiringTime is set to off [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:315 Rebinding as DM | |||
Passed | suites/password/pwdPolicy_warning_test.py::test_password_warning[passwordWarning-3600] | 0.58 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:46 Get the default values [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:53 Set the new values [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:113 Add the user -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:300 Set configuration parameter [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:171 Setting passwordWarning to 3600 [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:303 Binding with (uid=tuser,ou=people,dc=example,dc=com) and requesting password expiry warning time [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:158 Bind with the user and request the password expiry warning time [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:307 Check the state of the control [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:312 (uid=tuser,ou=people,dc=example,dc=com) password will expire in 172799 seconds [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:315 Rebinding as DM | |||
Passed | suites/password/pwdPolicy_warning_test.py::test_with_different_password_states | 0.15 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:46 Get the default values [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:53 Set the new values [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:113 Add the user -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:348 Expire user's password by changing passwordExpirationTime timestamp [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:352 Old passwordExpirationTime: 20201112023239Z [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:355 New passwordExpirationTime: 20201012023239Z [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:358 Attempting to bind with user uid=tuser,ou=people,dc=example,dc=com and retrive the password expiry warning time [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:158 Bind with the user and request the password expiry warning time [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:362 Bind Failed, error: <ExceptionInfo INVALID_CREDENTIALS({'msgtype': 97, 'msgid': 73, 'result': 49, 'desc': 'Invalid credentials', 'ctrls': [('1.3.6.1.4.1.....8.5.1', 0, b'0\x84\x00\x00\x00\x03\x81\x01\x00'), ('2.16.840.1.113730.3.4.4', 0, b'0')], 'info': 'password expired!'}) tblen=10> [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:364 Rebinding as DM [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:367 Reverting back user's passwordExpirationTime [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:371 Rebinding with 
uid=tuser,ou=people,dc=example,dc=com and retrieving the password expiry warning time [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:158 Bind with the user and request the password expiry warning time [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:374 Check that the control is returned [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:377 user's password will expire in 172800 seconds [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:380 Rebinding as DM | |||
Passed | suites/password/pwdPolicy_warning_test.py::test_default_behavior | 0.04 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:84 Get the default values [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:90 Set the new values [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:113 Add the user -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:406 Binding with uid=tuser,ou=people,dc=example,dc=com and requesting the password expiry warning time [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:158 Bind with the user and request the password expiry warning time [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:410 Check that no control is returned [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:413 Rebinding as DM | |||
Passed | suites/password/pwdPolicy_warning_test.py::test_when_maxage_and_warning_are_the_same | 2.12 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:84 Get the default values [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:90 Set the new values [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:113 Add the user -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:442 Set the new values [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:446 First change user's password to reset its password expiration time [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:453 Binding with uid=tuser,ou=people,dc=example,dc=com and requesting the password expiry warning time [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:158 Bind with the user and request the password expiry warning time [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:457 Check that control is returned evenif passwordSendExpiringTime is set to off [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:461 user's password will expire in 86400 seconds [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:463 Rebinding as DM | |||
Passed | suites/password/pwdPolicy_warning_test.py::test_with_local_policy | 0.05 | |
-----------------------------Captured stdout setup------------------------------ Successfully created user password policy -------------------------------Captured log setup------------------------------- [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:46 Get the default values [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:53 Set the new values [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:113 Add the user [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:141 Setting fine grained policy for user (uid=tuser,ou=people,dc=example,dc=com) -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:490 Attempting to get password expiry warning time for user uid=tuser,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:158 Bind with the user and request the password expiry warning time [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:493 Check that the control is not returned [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:496 Password expiry warning time is not returned [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:498 Rebinding as DM | |||
Passed | suites/password/pwdPolicy_warning_test.py::test_search_shadowWarning_when_passwordWarning_is_lower | 0.11 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:46 Get the default values [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:53 Set the new values -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:530 Bind as cn=Directory Manager [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:533 Creating test user [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:538 Setting passwordWarning to smaller value than 86400 [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:541 Bind as test user [32mINFO [0m tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:544 Check if attribute shadowWarning is present | |||
Passed | suites/password/pwdPolicy_warning_test.py::test_password_expire_works | 1.67 | |
No log output captured. | |||
Passed | suites/password/pwd_algo_test.py::test_pwd_algo_test[CLEAR] | 0.10 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.pwd_algo_test:pwd_algo_test.py:138 Test CLEAR PASSED | |||
Passed | suites/password/pwd_algo_test.py::test_pwd_algo_test[CRYPT] | 0.07 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.pwd_algo_test:pwd_algo_test.py:138 Test CRYPT PASSED | |||
Passed | suites/password/pwd_algo_test.py::test_pwd_algo_test[CRYPT-MD5] | 0.08 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.pwd_algo_test:pwd_algo_test.py:138 Test CRYPT-MD5 PASSED | |||
Passed | suites/password/pwd_algo_test.py::test_pwd_algo_test[CRYPT-SHA256] | 0.12 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.pwd_algo_test:pwd_algo_test.py:138 Test CRYPT-SHA256 PASSED | |||
Passed | suites/password/pwd_algo_test.py::test_pwd_algo_test[CRYPT-SHA512] | 0.12 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.pwd_algo_test:pwd_algo_test.py:138 Test CRYPT-SHA512 PASSED | |||
Passed | suites/password/pwd_algo_test.py::test_pwd_algo_test[MD5] | 0.09 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.pwd_algo_test:pwd_algo_test.py:138 Test MD5 PASSED | |||
Passed | suites/password/pwd_algo_test.py::test_pwd_algo_test[SHA] | 0.09 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.pwd_algo_test:pwd_algo_test.py:138 Test SHA PASSED | |||
Passed | suites/password/pwd_algo_test.py::test_pwd_algo_test[SHA256] | 0.12 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.pwd_algo_test:pwd_algo_test.py:138 Test SHA256 PASSED | |||
Passed | suites/password/pwd_algo_test.py::test_pwd_algo_test[SHA384] | 0.09 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.pwd_algo_test:pwd_algo_test.py:138 Test SHA384 PASSED | |||
Passed | suites/password/pwd_algo_test.py::test_pwd_algo_test[SHA512] | 0.08 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.pwd_algo_test:pwd_algo_test.py:138 Test SHA512 PASSED | |||
Passed | suites/password/pwd_algo_test.py::test_pwd_algo_test[SMD5] | 0.11 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.pwd_algo_test:pwd_algo_test.py:138 Test SMD5 PASSED | |||
Passed | suites/password/pwd_algo_test.py::test_pwd_algo_test[SSHA] | 0.11 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.pwd_algo_test:pwd_algo_test.py:138 Test SSHA PASSED | |||
Passed | suites/password/pwd_algo_test.py::test_pwd_algo_test[SSHA256] | 0.10 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.pwd_algo_test:pwd_algo_test.py:138 Test SSHA256 PASSED | |||
Passed | suites/password/pwd_algo_test.py::test_pwd_algo_test[SSHA384] | 0.08 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.pwd_algo_test:pwd_algo_test.py:138 Test SSHA384 PASSED | |||
Passed | suites/password/pwd_algo_test.py::test_pwd_algo_test[SSHA512] | 0.08 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.pwd_algo_test:pwd_algo_test.py:138 Test SSHA512 PASSED | |||
Passed | suites/password/pwd_algo_test.py::test_pwd_algo_test[PBKDF2_SHA256] | 0.37 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.pwd_algo_test:pwd_algo_test.py:138 Test PBKDF2_SHA256 PASSED | |||
Passed | suites/password/pwd_algo_test.py::test_pwd_algo_test[DEFAULT] | 0.34 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.pwd_algo_test:pwd_algo_test.py:138 Test DEFAULT PASSED | |||
Passed | suites/password/pwd_algo_test.py::test_pbkdf2_algo | 3.70 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.pwd_algo_test:pwd_algo_test.py:170 Test PASSED | |||
Passed | suites/password/pwd_lockout_bypass_test.py::test_lockout_bypass | 0.57 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/password/pwd_log_test.py::test_hide_unhashed_pwd | 4.21 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.pwd_log_test:pwd_log_test.py:79 Test complete | |||
Passed | suites/password/pwp_gracel_test.py::test_password_gracelimit_section | 14.18 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/password/pwp_history_test.py::test_history_is_not_overwritten | 4.59 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.pwp_history_test:pwp_history_test.py:77 Configured password policy. | |||
Passed | suites/password/pwp_history_test.py::test_basic | 7.36 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.pwp_history_test:pwp_history_test.py:152 Configured password policy. [32mINFO [0m tests.suites.password.pwp_history_test:pwp_history_test.py:170 Password change correctly rejected [32mINFO [0m tests.suites.password.pwp_history_test:pwp_history_test.py:201 Correct number of passwords found in history. [32mINFO [0m tests.suites.password.pwp_history_test:pwp_history_test.py:212 Password change correctly rejected [32mINFO [0m tests.suites.password.pwp_history_test:pwp_history_test.py:222 Password change correctly rejected [32mINFO [0m tests.suites.password.pwp_history_test:pwp_history_test.py:232 Password change correctly rejected [32mINFO [0m tests.suites.password.pwp_history_test:pwp_history_test.py:254 Password change correctly rejected [32mINFO [0m tests.suites.password.pwp_history_test:pwp_history_test.py:267 Configured passwordInHistory to 0. [32mINFO [0m tests.suites.password.pwp_history_test:pwp_history_test.py:283 Password change correctly rejected [32mINFO [0m tests.suites.password.pwp_history_test:pwp_history_test.py:299 Configured passwordInHistory to 2. [32mINFO [0m tests.suites.password.pwp_history_test:pwp_history_test.py:312 Password change correctly rejected [32mINFO [0m tests.suites.password.pwp_history_test:pwp_history_test.py:326 Test suite PASSED. | |||
Passed | suites/password/pwp_test.py::test_passwordchange_to_no | 0.14 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/password/pwp_test.py::test_password_check_syntax | 0.45 | |
No log output captured. | |||
Passed | suites/password/pwp_test.py::test_too_big_password | 0.48 | |
No log output captured. | |||
Passed | suites/password/pwp_test.py::test_pwminage | 3.38 | |
No log output captured. | |||
Passed | suites/password/pwp_test.py::test_invalid_credentials | 7.42 | |
No log output captured. | |||
Passed | suites/password/pwp_test.py::test_expiration_date | 1.33 | |
No log output captured. | |||
Passed | suites/password/pwp_test.py::test_passwordlockout | 2.57 | |
No log output captured. | |||
Passed | suites/password/regression_of_bugs_test.py::test_local_password_policy | 0.13 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/password/regression_of_bugs_test.py::test_passwordexpirationtime_attribute | 3.10 | |
No log output captured. | |||
Passed | suites/password/regression_of_bugs_test.py::test_admin_group_to_modify_password | 1.67 | |
No log output captured. | |||
Passed | suites/password/regression_of_bugs_test.py::test_password_max_failure_should_lockout_password | 0.16 | |
No log output captured. | |||
Passed | suites/password/regression_of_bugs_test.py::test_pwd_update_time_attribute | 3.33 | |
No log output captured. | |||
Passed | suites/password/regression_of_bugs_test.py::test_password_track_update_time | 7.42 | |
No log output captured. | |||
Passed | suites/password/regression_of_bugs_test.py::test_signal_11 | 0.10 | |
No log output captured. | |||
Passed | suites/password/regression_test.py::test_pwp_local_unlock | 4.14 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m tests.suites.password.regression_test:regression_test.py:68 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to on [32mINFO [0m tests.suites.password.regression_test:regression_test.py:75 Configure subtree password policy for ou=people,dc=example,dc=com [32mINFO [0m tests.suites.password.regression_test:regression_test.py:98 Adding user-uid=UIDpwtest1,ou=people,dc=example,dc=com -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.regression_test:regression_test.py:137 Verify user can bind... [32mINFO [0m tests.suites.password.regression_test:regression_test.py:140 Test passwordUnlock default - user should be able to reset password after lockout [32mINFO [0m tests.suites.password.regression_test:regression_test.py:151 Verify account is locked [32mINFO [0m tests.suites.password.regression_test:regression_test.py:155 Wait for lockout duration... [32mINFO [0m tests.suites.password.regression_test:regression_test.py:158 Check if user can now bind with correct password | |||
Passed | suites/password/regression_test.py::test_trivial_passw_check[CNpwtest1] | 0.10 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.regression_test:regression_test.py:183 Replace userPassword attribute with CNpwtest1 | |||
Passed | suites/password/regression_test.py::test_trivial_passw_check[SNpwtest1] | 0.08 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.regression_test:regression_test.py:183 Replace userPassword attribute with SNpwtest1 | |||
Passed | suites/password/regression_test.py::test_trivial_passw_check[UIDpwtest1] | 0.08 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.regression_test:regression_test.py:183 Replace userPassword attribute with UIDpwtest1 | |||
Passed | suites/password/regression_test.py::test_trivial_passw_check[MAILpwtest1@redhat.com] | 0.11 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.regression_test:regression_test.py:183 Replace userPassword attribute with MAILpwtest1@redhat.com | |||
Passed | suites/password/regression_test.py::test_trivial_passw_check[GNpwtest1] | 0.10 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.regression_test:regression_test.py:183 Replace userPassword attribute with GNpwtest1 | |||
Passed | suites/password/regression_test.py::test_trivial_passw_check[CNpwtest1ZZZZ] | 0.35 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.regression_test:regression_test.py:183 Replace userPassword attribute with CNpwtest1ZZZZ | |||
Passed | suites/password/regression_test.py::test_trivial_passw_check[ZZZZZCNpwtest1] | 0.10 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.regression_test:regression_test.py:183 Replace userPassword attribute with ZZZZZCNpwtest1 | |||
Passed | suites/password/regression_test.py::test_trivial_passw_check[ZCNpwtest1] | 0.12 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.regression_test:regression_test.py:183 Replace userPassword attribute with ZCNpwtest1 | |||
Passed | suites/password/regression_test.py::test_trivial_passw_check[CNpwtest1Z] | 0.30 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.regression_test:regression_test.py:183 Replace userPassword attribute with CNpwtest1Z | |||
Passed | suites/password/regression_test.py::test_trivial_passw_check[ZCNpwtest1Z] | 0.09 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.regression_test:regression_test.py:183 Replace userPassword attribute with ZCNpwtest1Z | |||
Passed | suites/password/regression_test.py::test_trivial_passw_check[ZZCNpwtest1] | 0.08 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.regression_test:regression_test.py:183 Replace userPassword attribute with ZZCNpwtest1 | |||
Passed | suites/password/regression_test.py::test_trivial_passw_check[CNpwtest1ZZ] | 0.08 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.regression_test:regression_test.py:183 Replace userPassword attribute with CNpwtest1ZZ | |||
Passed | suites/password/regression_test.py::test_trivial_passw_check[ZZCNpwtest1ZZ] | 0.08 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.regression_test:regression_test.py:183 Replace userPassword attribute with ZZCNpwtest1ZZ | |||
Passed | suites/password/regression_test.py::test_trivial_passw_check[ZZZCNpwtest1] | 0.34 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.regression_test:regression_test.py:183 Replace userPassword attribute with ZZZCNpwtest1 | |||
Passed | suites/password/regression_test.py::test_trivial_passw_check[CNpwtest1ZZZ] | 0.09 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.regression_test:regression_test.py:183 Replace userPassword attribute with CNpwtest1ZZZ | |||
Passed | suites/password/regression_test.py::test_trivial_passw_check[ZZZCNpwtest1ZZZ] | 0.09 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.regression_test:regression_test.py:183 Replace userPassword attribute with ZZZCNpwtest1ZZZ | |||
Passed | suites/password/regression_test.py::test_trivial_passw_check[ZZZZZZCNpwtest1ZZZZZZZZ] | 0.35 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.regression_test:regression_test.py:183 Replace userPassword attribute with ZZZZZZCNpwtest1ZZZZZZZZ | |||
Passed | suites/password/regression_test.py::test_global_vs_local[CNpwtest1] | 0.19 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.regression_test:regression_test.py:211 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to off [32mINFO [0m tests.suites.password.regression_test:regression_test.py:216 Replace userPassword attribute with CNpwtest1 | |||
Passed | suites/password/regression_test.py::test_global_vs_local[SNpwtest1] | 0.18 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.regression_test:regression_test.py:211 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to off [32mINFO [0m tests.suites.password.regression_test:regression_test.py:216 Replace userPassword attribute with SNpwtest1 | |||
Passed | suites/password/regression_test.py::test_global_vs_local[UIDpwtest1] | 0.17 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.regression_test:regression_test.py:211 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to off [32mINFO [0m tests.suites.password.regression_test:regression_test.py:216 Replace userPassword attribute with UIDpwtest1 | |||
Passed | suites/password/regression_test.py::test_global_vs_local[MAILpwtest1@redhat.com] | 0.16 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.regression_test:regression_test.py:211 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to off [32mINFO [0m tests.suites.password.regression_test:regression_test.py:216 Replace userPassword attribute with MAILpwtest1@redhat.com | |||
Passed | suites/password/regression_test.py::test_global_vs_local[GNpwtest1] | 0.15 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.regression_test:regression_test.py:211 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to off [32mINFO [0m tests.suites.password.regression_test:regression_test.py:216 Replace userPassword attribute with GNpwtest1 | |||
Passed | suites/password/regression_test.py::test_global_vs_local[CNpwtest1ZZZZ] | 0.18 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.regression_test:regression_test.py:211 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to off [32mINFO [0m tests.suites.password.regression_test:regression_test.py:216 Replace userPassword attribute with CNpwtest1ZZZZ | |||
Passed | suites/password/regression_test.py::test_global_vs_local[ZZZZZCNpwtest1] | 0.19 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.regression_test:regression_test.py:211 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to off [32mINFO [0m tests.suites.password.regression_test:regression_test.py:216 Replace userPassword attribute with ZZZZZCNpwtest1 | |||
Passed | suites/password/regression_test.py::test_global_vs_local[ZCNpwtest1] | 0.20 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.regression_test:regression_test.py:211 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to off [32mINFO [0m tests.suites.password.regression_test:regression_test.py:216 Replace userPassword attribute with ZCNpwtest1 | |||
Passed | suites/password/regression_test.py::test_global_vs_local[CNpwtest1Z] | 0.38 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.regression_test:regression_test.py:211 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to off [32mINFO [0m tests.suites.password.regression_test:regression_test.py:216 Replace userPassword attribute with CNpwtest1Z | |||
Passed | suites/password/regression_test.py::test_global_vs_local[ZCNpwtest1Z] | 0.17 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.regression_test:regression_test.py:211 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to off [32mINFO [0m tests.suites.password.regression_test:regression_test.py:216 Replace userPassword attribute with ZCNpwtest1Z | |||
Passed | suites/password/regression_test.py::test_global_vs_local[ZZCNpwtest1] | 0.15 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.regression_test:regression_test.py:211 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to off [32mINFO [0m tests.suites.password.regression_test:regression_test.py:216 Replace userPassword attribute with ZZCNpwtest1 | |||
Passed | suites/password/regression_test.py::test_global_vs_local[CNpwtest1ZZ] | 0.16 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.regression_test:regression_test.py:211 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to off [32mINFO [0m tests.suites.password.regression_test:regression_test.py:216 Replace userPassword attribute with CNpwtest1ZZ | |||
Passed | suites/password/regression_test.py::test_global_vs_local[ZZCNpwtest1ZZ] | 0.18 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.regression_test:regression_test.py:211 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to off [32mINFO [0m tests.suites.password.regression_test:regression_test.py:216 Replace userPassword attribute with ZZCNpwtest1ZZ | |||
Passed | suites/password/regression_test.py::test_global_vs_local[ZZZCNpwtest1] | 0.15 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.regression_test:regression_test.py:211 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to off [32mINFO [0m tests.suites.password.regression_test:regression_test.py:216 Replace userPassword attribute with ZZZCNpwtest1 | |||
Passed | suites/password/regression_test.py::test_global_vs_local[CNpwtest1ZZZ] | 0.20 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.regression_test:regression_test.py:211 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to off [32mINFO [0m tests.suites.password.regression_test:regression_test.py:216 Replace userPassword attribute with CNpwtest1ZZZ | |||
Passed | suites/password/regression_test.py::test_global_vs_local[ZZZCNpwtest1ZZZ] | 0.20 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.regression_test:regression_test.py:211 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to off [32mINFO [0m tests.suites.password.regression_test:regression_test.py:216 Replace userPassword attribute with ZZZCNpwtest1ZZZ | |||
Passed | suites/password/regression_test.py::test_global_vs_local[ZZZZZZCNpwtest1ZZZZZZZZ] | 0.18 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.regression_test:regression_test.py:211 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to off [32mINFO [0m tests.suites.password.regression_test:regression_test.py:216 Replace userPassword attribute with ZZZZZZCNpwtest1ZZZZZZZZ | |||
Passed | suites/password/regression_test.py::test_unhashed_pw_switch | 35.96 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.password.regression_test:regression_test.py:257 Enable plugins... [32mINFO [0m tests.suites.password.regression_test:regression_test.py:272 create users and group... [32mINFO [0m lib389:__init__.py:3014 Running script: ['/usr/bin/dbscan', '-f', '/var/lib/dirsrv/slapd-master1/db/userRoot/replication_changelog.db'] [32mINFO [0m lib389:__init__.py:3014 Running script: ['/usr/bin/dbscan', '-f', '/var/lib/dirsrv/slapd-master1/db/userRoot/replication_changelog.db'] [32mINFO [0m lib389:__init__.py:3014 Running script: ['/usr/bin/dbscan', '-f', '/var/lib/dirsrv/slapd-master1/db/userRoot/replication_changelog.db'] | |||
Passed | suites/plugins/acceptance_test.py::test_acctpolicy | 15.37 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38902, 'ldap-secureport': 63602, 'server-id': 'standalone2', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/plugins/acceptance_test.py::test_attruniq | 19.60 | |
No log output captured. | |||
Passed | suites/plugins/acceptance_test.py::test_automember | 27.14 | |
No log output captured. | |||
Passed | suites/plugins/acceptance_test.py::test_dna | 19.11 | |
No log output captured. | |||
Passed | suites/plugins/acceptance_test.py::test_linkedattrs | 27.20 | |
No log output captured. | |||
Passed | suites/plugins/acceptance_test.py::test_memberof | 36.81 | |
No log output captured. | |||
Passed | suites/plugins/acceptance_test.py::test_mep | 19.78 | |
No log output captured. | |||
Passed | suites/plugins/acceptance_test.py::test_passthru | 23.85 | |
No log output captured. | |||
Passed | suites/plugins/acceptance_test.py::test_referint | 14.52 | |
No log output captured. | |||
Passed | suites/plugins/acceptance_test.py::test_retrocl | 24.88 | |
No log output captured. | |||
Passed | suites/plugins/acceptance_test.py::test_rootdn | 37.60 | |
No log output captured. | |||
Passed | suites/plugins/accpol_test.py::test_glact_inact | 27.41 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.utils:accpol_test.py:35 Configuring Global account policy plugin, pwpolicy attributes and restarting the server -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.utils:accpol_test.py:348 AccountInactivityLimit set to 12. Account will be inactivated if not accessed in 12 secs [32mINFO [0m lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs [32mINFO [0m lib389.utils:accpol_test.py:351 Sleep for 10 secs to check if account is not inactivated, expected value 0 [32mINFO [0m lib389.utils:accpol_test.py:353 Account should not be inactivated since AccountInactivityLimit not exceeded [32mINFO [0m lib389.utils:accpol_test.py:356 Sleep for 3 more secs to check if account is inactivated [32mINFO [0m lib389.utils:accpol_test.py:360 Sleep +10 secs to check if account glinactusr3 is inactivated [32mINFO [0m lib389.utils:accpol_test.py:290 del_users: Pass all of these as parameters suffix, subtree, userid and nousrs | |||
Passed | suites/plugins/accpol_test.py::test_glremv_lastlogin | 19.23 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.utils:accpol_test.py:394 AccountInactivityLimit set to 12. Account will be inactivated if not accessed in 12 secs [32mINFO [0m lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs [32mINFO [0m lib389.utils:accpol_test.py:396 Sleep for 6 secs to check if account is not inactivated, expected value 0 [32mINFO [0m lib389.utils:accpol_test.py:398 Account should not be inactivated since AccountInactivityLimit not exceeded [32mINFO [0m lib389.utils:accpol_test.py:254 Delete lastLoginTime/createTimeStamp/ModifyTimeStamp attribute from user account [32mINFO [0m lib389.utils:accpol_test.py:401 Sleep for 7 more secs to check if account is inactivated [32mINFO [0m lib389.utils:accpol_test.py:219 Enable account by replacing lastLoginTime/createTimeStamp/ModifyTimeStamp attribute [32mINFO [0m lib389.utils:accpol_test.py:405 Check if account is activated, expected 0 [32mINFO [0m lib389.utils:accpol_test.py:290 del_users: Pass all of these as parameters suffix, subtree, userid and nousrs | |||
Passed | suites/plugins/accpol_test.py::test_glact_login | 23.54 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.utils:accpol_test.py:434 AccountInactivityLimit set to 12. Account will be inactivated if not accessed in 12 secs [32mINFO [0m lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs [32mINFO [0m lib389.utils:accpol_test.py:436 Sleep for 13 secs to check if account is inactivated, expected error 19 [32mINFO [0m lib389.utils:accpol_test.py:219 Enable account by replacing lastLoginTime/createTimeStamp/ModifyTimeStamp attribute [32mINFO [0m lib389.utils:accpol_test.py:440 Check if account is activated, expected 0 [32mINFO [0m lib389.utils:accpol_test.py:290 del_users: Pass all of these as parameters suffix, subtree, userid and nousrs | |||
Passed | suites/plugins/accpol_test.py::test_glinact_limit | 122.87 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.utils:accpol_test.py:492 AccountInactivityLimit set to 12. Account will be inactivated if not accessed in 12 secs [32mINFO [0m lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs [32mINFO [0m lib389.utils:accpol_test.py:494 Sleep for 9 secs to check if account is not inactivated, expected 0 [32mINFO [0m lib389.utils:accpol_test.py:189 Modify attribute value for a given DN [32mINFO [0m lib389.utils:accpol_test.py:189 Modify attribute value for a given DN [32mINFO [0m lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs [32mINFO [0m lib389.utils:accpol_test.py:189 Modify attribute value for a given DN [32mINFO [0m lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs [32mINFO [0m lib389.utils:accpol_test.py:516 Check if account is activated, expected 0 [32mINFO [0m lib389.utils:accpol_test.py:219 Enable account by replacing lastLoginTime/createTimeStamp/ModifyTimeStamp attribute [32mINFO [0m lib389.utils:accpol_test.py:189 Modify attribute value for a given DN [32mINFO [0m lib389.utils:accpol_test.py:290 del_users: Pass all of these as parameters suffix, subtree, userid and nousrs | |||
Passed | suites/plugins/accpol_test.py::test_glnologin_attr | 86.99 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.utils:accpol_test.py:575 AccountInactivityLimit set to 12. Account will be inactivated if not accessed in 12 secs [32mINFO [0m lib389.utils:accpol_test.py:576 Set attribute StateAttrName to createTimestamp, loginTime attr wont be considered [32mINFO [0m lib389.utils:accpol_test.py:189 Modify attribute value for a given DN [32mINFO [0m lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs [32mINFO [0m lib389.utils:accpol_test.py:580 Sleep for 9 secs to check if account is not inactivated, expected 0 [32mINFO [0m lib389.utils:accpol_test.py:189 Modify attribute value for a given DN [32mINFO [0m lib389.utils:accpol_test.py:189 Modify attribute value for a given DN [32mINFO [0m lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs [32mINFO [0m lib389.utils:accpol_test.py:189 Modify attribute value for a given DN [32mINFO [0m lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs [32mINFO [0m lib389.utils:accpol_test.py:189 Modify attribute value for a given DN [32mINFO [0m lib389.utils:accpol_test.py:605 Set attribute StateAttrName to lastLoginTime, the default [32mINFO [0m lib389.utils:accpol_test.py:189 Modify attribute value for a given DN [32mINFO [0m lib389.utils:accpol_test.py:219 Enable account by replacing lastLoginTime/createTimeStamp/ModifyTimeStamp attribute [32mINFO [0m lib389.utils:accpol_test.py:609 Check if account is activated, expected 0 [32mINFO [0m lib389.utils:accpol_test.py:290 del_users: Pass all of these as parameters suffix, subtree, userid and nousrs | |||
Passed | suites/plugins/accpol_test.py::test_glnoalt_stattr | 57.94 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.utils:accpol_test.py:640 Set attribute altStateAttrName to 1.1 [32mINFO [0m lib389.utils:accpol_test.py:189 Modify attribute value for a given DN [32mINFO [0m lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs [32mINFO [0m lib389.utils:accpol_test.py:644 Sleep for 13 secs to check if account is not inactivated, expected 0 [32mINFO [0m lib389.utils:accpol_test.py:647 lastLoginTime attribute is added from the above ldap bind by userdn [32mINFO [0m lib389.utils:accpol_test.py:254 Delete lastLoginTime/createTimeStamp/ModifyTimeStamp attribute from user account [32mINFO [0m lib389.utils:accpol_test.py:189 Modify attribute value for a given DN [32mINFO [0m lib389.utils:accpol_test.py:219 Enable account by replacing lastLoginTime/createTimeStamp/ModifyTimeStamp attribute [32mINFO [0m lib389.utils:accpol_test.py:290 del_users: Pass all of these as parameters suffix, subtree, userid and nousrs | |||
Passed | suites/plugins/accpol_test.py::test_glattr_modtime | 45.83 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.utils:accpol_test.py:687 Set attribute altStateAttrName to modifyTimestamp [32mINFO [0m lib389.utils:accpol_test.py:189 Modify attribute value for a given DN [32mINFO [0m lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs [32mINFO [0m lib389.utils:accpol_test.py:691 Sleep for 13 secs to check if account is inactivated, expected 0 [32mINFO [0m lib389.utils:accpol_test.py:202 Check ModifyTimeStamp attribute present for user [32mINFO [0m lib389.utils:accpol_test.py:237 Enable account by replacing cn attribute value, value of modifyTimeStamp changed [32mINFO [0m lib389.utils:accpol_test.py:189 Modify attribute value for a given DN [32mINFO [0m lib389.utils:accpol_test.py:254 Delete lastLoginTime/createTimeStamp/ModifyTimeStamp attribute from user account [32mINFO [0m lib389.utils:accpol_test.py:219 Enable account by replacing lastLoginTime/createTimeStamp/ModifyTimeStamp attribute [32mINFO [0m lib389.utils:accpol_test.py:290 del_users: Pass all of these as parameters suffix, subtree, userid and nousrs | |||
Passed | suites/plugins/accpol_test.py::test_glnoalt_nologin | 52.66 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.utils:accpol_test.py:742 Set attribute altStateAttrName to 1.1 [32mINFO [0m lib389.utils:accpol_test.py:189 Modify attribute value for a given DN [32mINFO [0m lib389.utils:accpol_test.py:744 Set attribute alwaysrecordlogin to No [32mINFO [0m lib389.utils:accpol_test.py:189 Modify attribute value for a given DN [32mINFO [0m lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs [32mINFO [0m lib389.utils:accpol_test.py:748 Sleep for 13 secs to check if account is not inactivated, expected 0 [32mINFO [0m lib389.utils:accpol_test.py:753 Set attribute altStateAttrName to createTimestamp [32mINFO [0m lib389.utils:accpol_test.py:189 Modify attribute value for a given DN [32mINFO [0m lib389.utils:accpol_test.py:758 Reset the default attribute values [32mINFO [0m lib389.utils:accpol_test.py:189 Modify attribute value for a given DN [32mINFO [0m lib389.utils:accpol_test.py:219 Enable account by replacing lastLoginTime/createTimeStamp/ModifyTimeStamp attribute [32mINFO [0m lib389.utils:accpol_test.py:290 del_users: Pass all of these as parameters suffix, subtree, userid and nousrs | |||
Passed | suites/plugins/accpol_test.py::test_glinact_nsact | 23.45 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.utils:accpol_test.py:799 AccountInactivityLimit set to 12. Account will be inactivated if not accessed in 12 secs [32mINFO [0m lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs [32mINFO [0m lib389.utils:accpol_test.py:801 Sleep for 3 secs to check if account is not inactivated, expected value 0 [32mINFO [0m lib389.utils:accpol_test.py:162 Account activate/in-activate/status using dsidm [32mINFO [0m lib389.utils:accpol_test.py:166 Running unlock for user uid=nsactusr1,ou=groups,dc=example,dc=com [32mINFO [0m lib389.utils:accpol_test.py:174 Running ['/usr/sbin/dsidm', 'slapd-standalone1', '-b', 'dc=example,dc=com', 'account', 'unlock', 'uid=nsactusr1,ou=groups,dc=example,dc=com'] for user uid=nsactusr1,ou=groups,dc=example,dc=com [32mINFO [0m lib389.utils:accpol_test.py:180 output: b'Error: Account is already active\n' [32mINFO [0m lib389.utils:accpol_test.py:804 Sleep for 10 secs to check if account is inactivated, expected value 19 [32mINFO [0m lib389.utils:accpol_test.py:162 Account activate/in-activate/status using dsidm [32mINFO [0m lib389.utils:accpol_test.py:166 Running unlock for user uid=nsactusr1,ou=groups,dc=example,dc=com [32mINFO [0m lib389.utils:accpol_test.py:174 Running ['/usr/sbin/dsidm', 'slapd-standalone1', '-b', 'dc=example,dc=com', 'account', 'unlock', 'uid=nsactusr1,ou=groups,dc=example,dc=com'] for user uid=nsactusr1,ou=groups,dc=example,dc=com [32mINFO [0m lib389.utils:accpol_test.py:180 output: b'Error: 103 - 22 - 16 - No such attribute - []\n' [32mINFO [0m lib389.utils:accpol_test.py:162 Account activate/in-activate/status using dsidm [32mINFO [0m lib389.utils:accpol_test.py:166 Running entry-status for user uid=nsactusr1,ou=groups,dc=example,dc=com [32mINFO [0m lib389.utils:accpol_test.py:174 Running ['/usr/sbin/dsidm', 'slapd-standalone1', '-b', 'dc=example,dc=com', 'account', 
'entry-status', 'uid=nsactusr1,ou=groups,dc=example,dc=com'] for user uid=nsactusr1,ou=groups,dc=example,dc=com [32mINFO [0m lib389.utils:accpol_test.py:180 output: b'Entry DN: uid=nsactusr1,ou=groups,dc=example,dc=com\nEntry Creation Date: 20201110024930Z (2020-11-10 02:49:30)\nEntry Modification Date: 20201110024930Z (2020-11-10 02:49:30)\nEntry Last Login Date: 20201110024930Z (2020-11-10 02:49:30)\nEntry Time Since Inactive: 6 seconds (2020-11-10 02:49:42)\nEntry State: inactivity limit exceeded\n\n' [32mINFO [0m lib389.utils:accpol_test.py:219 Enable account by replacing lastLoginTime/createTimeStamp/ModifyTimeStamp attribute [32mINFO [0m lib389.utils:accpol_test.py:162 Account activate/in-activate/status using dsidm [32mINFO [0m lib389.utils:accpol_test.py:166 Running entry-status for user uid=nsactusr1,ou=groups,dc=example,dc=com [32mINFO [0m lib389.utils:accpol_test.py:174 Running ['/usr/sbin/dsidm', 'slapd-standalone1', '-b', 'dc=example,dc=com', 'account', 'entry-status', 'uid=nsactusr1,ou=groups,dc=example,dc=com'] for user uid=nsactusr1,ou=groups,dc=example,dc=com [32mINFO [0m lib389.utils:accpol_test.py:180 output: b'Entry DN: uid=nsactusr1,ou=groups,dc=example,dc=com\nEntry Creation Date: 20201110024930Z (2020-11-10 02:49:30)\nEntry Modification Date: 20201110024949Z (2020-11-10 02:49:49)\nEntry Last Login Date: 20201110024951Z (2020-11-10 02:49:51)\nEntry Time Until Inactive: 11 seconds (2020-11-10 02:50:03)\nEntry State: activated\n\n' [32mINFO [0m lib389.utils:accpol_test.py:290 del_users: Pass all of these as parameters suffix, subtree, userid and nousrs | |||
Passed | suites/plugins/accpol_test.py::test_glinact_acclock | 40.42 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.utils:accpol_test.py:844 AccountInactivityLimit set to 12. Account will be inactivated if not accessed in 12 secs [32mINFO [0m lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs [32mINFO [0m lib389.utils:accpol_test.py:846 Sleep for 3 secs and try invalid binds to lockout the user [32mINFO [0m lib389.utils:accpol_test.py:118 Lockout user account by attempting invalid password binds [32mINFO [0m lib389.utils:accpol_test.py:850 Sleep for 10 secs to check if account is inactivated, expected value 19 [32mINFO [0m lib389.utils:accpol_test.py:854 Add lastLoginTime to activate the user account [32mINFO [0m lib389.utils:accpol_test.py:219 Enable account by replacing lastLoginTime/createTimeStamp/ModifyTimeStamp attribute [32mINFO [0m lib389.utils:accpol_test.py:858 Checking if account is unlocked after passwordlockoutduration, but inactivated after accountInactivityLimit [32mINFO [0m lib389.utils:accpol_test.py:118 Lockout user account by attempting invalid password binds [32mINFO [0m lib389.utils:accpol_test.py:862 Account is expected to be unlocked after 5 secs of passwordlockoutduration [32mINFO [0m lib389.utils:accpol_test.py:866 Sleep 13s and check if account inactivated based on accountInactivityLimit, expected 19 [32mINFO [0m lib389.utils:accpol_test.py:290 del_users: Pass all of these as parameters suffix, subtree, userid and nousrs | |||
Passed | suites/plugins/accpol_test.py::test_glnact_pwexp | 49.49 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.utils:accpol_test.py:909 AccountInactivityLimit set to 12. Account will be inactivated if not accessed in 12 secs [32mINFO [0m lib389.utils:accpol_test.py:910 Passwordmaxage is set to 9. Password will expire in 9 secs [32mINFO [0m lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs [32mINFO [0m lib389.utils:accpol_test.py:913 Sleep for 9 secs and check if password expired [32mINFO [0m lib389.utils:accpol_test.py:919 Add lastLoginTime to activate the user account [32mINFO [0m lib389.utils:accpol_test.py:219 Enable account by replacing lastLoginTime/createTimeStamp/ModifyTimeStamp attribute [32mINFO [0m lib389.utils:accpol_test.py:141 Reset user password for user-uid=pwexpusr1,ou=groups,dc=example,dc=com [32mINFO [0m lib389.utils:accpol_test.py:141 Reset user password for user-uid=pwexpusr1,ou=groups,dc=example,dc=com [32mINFO [0m lib389.utils:accpol_test.py:930 Sleep for 4 secs and check if account is now inactivated, expected error 19 [32mINFO [0m lib389.utils:accpol_test.py:141 Reset user password for user-uid=pwexpusr1,ou=groups,dc=example,dc=com [32mINFO [0m lib389.utils:accpol_test.py:219 Enable account by replacing lastLoginTime/createTimeStamp/ModifyTimeStamp attribute [32mINFO [0m lib389.utils:accpol_test.py:141 Reset user password for user-uid=pwexpusr1,ou=groups,dc=example,dc=com [32mINFO [0m lib389.utils:accpol_test.py:290 del_users: Pass all of these as parameters suffix, subtree, userid and nousrs | |||
Passed | suites/plugins/accpol_test.py::test_locact_inact | 31.57 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.utils:accpol_test.py:80 Adding Local account policy plugin configuration entries -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.utils:accpol_test.py:981 AccountInactivityLimit set to 10. Account will be inactivated if not accessed in 10 secs [32mINFO [0m lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs [32mINFO [0m lib389.utils:accpol_test.py:983 Sleep for 9 secs to check if account is not inactivated, expected value 0 [32mINFO [0m lib389.utils:accpol_test.py:985 Account should not be inactivated since AccountInactivityLimit not exceeded [32mINFO [0m lib389.utils:accpol_test.py:987 Sleep for 2 more secs to check if account is inactivated [32mINFO [0m lib389.utils:accpol_test.py:990 Sleep +9 secs to check if account inactusr3 is inactivated [32mINFO [0m lib389.utils:accpol_test.py:993 Add lastLoginTime attribute to all users and check if its activated [32mINFO [0m lib389.utils:accpol_test.py:219 Enable account by replacing lastLoginTime/createTimeStamp/ModifyTimeStamp attribute [32mINFO [0m lib389.utils:accpol_test.py:290 del_users: Pass all of these as parameters suffix, subtree, userid and nousrs | |||
Passed | suites/plugins/accpol_test.py::test_locinact_modrdn | 27.15 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.utils:accpol_test.py:1027 Account should not be inactivated since the subtree is not configured [32mINFO [0m lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs [32mINFO [0m lib389.utils:accpol_test.py:1029 Sleep for 11 secs to check if account is not inactivated, expected value 0 [32mINFO [0m lib389.utils:accpol_test.py:1032 Moving users from ou=groups to ou=people subtree [32mINFO [0m lib389.utils:accpol_test.py:1040 Then wait for 11 secs and check if entries are inactivated [32mINFO [0m lib389.utils:accpol_test.py:219 Enable account by replacing lastLoginTime/createTimeStamp/ModifyTimeStamp attribute [32mINFO [0m lib389.utils:accpol_test.py:290 del_users: Pass all of these as parameters suffix, subtree, userid and nousrs | |||
Passed | suites/plugins/accpol_test.py::test_locact_modrdn | 15.11 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.utils:accpol_test.py:1072 Account should be inactivated since the subtree is configured [32mINFO [0m lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs [32mINFO [0m lib389.utils:accpol_test.py:1074 Sleep for 11 secs to check if account is inactivated, expected value 19 [32mINFO [0m lib389.utils:accpol_test.py:1077 Moving users from ou=people to ou=groups subtree [32mINFO [0m lib389.utils:accpol_test.py:1084 Sleep for +2 secs and check users from both ou=people and ou=groups subtree [32mINFO [0m lib389.utils:accpol_test.py:290 del_users: Pass all of these as parameters suffix, subtree, userid and nousrs | |||
Passed | suites/plugins/attr_nsslapd-pluginarg_test.py::test_duplicate_values | 3.74 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:31 Ticket 47431 - 0: Enable 7bit plugin... -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:56 Ticket 47431 - 1: Check 26 duplicate values are treated as one... [35mDEBUG [0m tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:59 modify_s cn=7-bit check,cn=plugins,cn=config [35mDEBUG [0m tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:83 line: [09/Nov/2020:21:53:08.311475682 -0500] - WARN - str2entry_dupcheck - 26 duplicate values for attribute type nsslapd-pluginarg2 detected in entry cn=7-bit check,cn=plugins,cn=config. Extra values ignored. [32mINFO [0m tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:84 Expected error "str2entry_dupcheck.* duplicate values for attribute type nsslapd-pluginarg2 detected in entry cn=7-bit check,cn=plugins,cn=config." logged in /var/log/dirsrv/slapd-standalone1/errors [32mINFO [0m tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:86 Ticket 47431 - 1: done | |||
Passed | suites/plugins/attr_nsslapd-pluginarg_test.py::test_multiple_value | 6.70 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:111 Ticket 47431 - 2: Check two values belonging to one arg is fixed... [35mDEBUG [0m tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:131 line - [09/Nov/2020:21:53:13.468994586 -0500] - DEBUG - NS7bitAttr - NS7bitAttr_Init - 0: uid [35mDEBUG [0m tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:132 ATTRS[0] uid [35mDEBUG [0m tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:136 uid was logged [35mDEBUG [0m tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:131 line - [09/Nov/2020:21:53:13.477266427 -0500] - DEBUG - NS7bitAttr - NS7bitAttr_Init - 1: mail [35mDEBUG [0m tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:132 ATTRS[1] mail [35mDEBUG [0m tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:136 mail was logged [35mDEBUG [0m tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:131 line - [09/Nov/2020:21:53:13.485987666 -0500] - DEBUG - NS7bitAttr - NS7bitAttr_Init - 2: userpassword [35mDEBUG [0m tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:132 ATTRS[2] userpassword [35mDEBUG [0m tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:136 userpassword was logged [35mDEBUG [0m tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:131 line - [09/Nov/2020:21:53:13.492653019 -0500] - DEBUG - NS7bitAttr - NS7bitAttr_Init - 3: , [35mDEBUG [0m tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:132 ATTRS[3] , [35mDEBUG [0m tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:136 , was logged [35mDEBUG [0m 
tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:131 line - [09/Nov/2020:21:53:13.496426471 -0500] - DEBUG - NS7bitAttr - NS7bitAttr_Init - 4: dc=example,dc=com [35mDEBUG [0m tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:132 ATTRS[4] dc=example,dc=com [35mDEBUG [0m tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:136 dc=example,dc=com was logged [32mINFO [0m tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:142 Ticket 47431 - 2: done | |||
Passed | suites/plugins/attr_nsslapd-pluginarg_test.py::test_missing_args | 5.18 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:170 Ticket 47431 - 3: Check missing args are fixed... [35mDEBUG [0m tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:197 uid was logged [35mDEBUG [0m tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:197 mail was logged [35mDEBUG [0m tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:197 userpassword was logged [35mDEBUG [0m tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:197 , was logged [35mDEBUG [0m tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:197 dc=example,dc=com was logged [32mINFO [0m tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:203 Ticket 47431 - 3: done [32mINFO [0m tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:204 Test complete | |||
Passed | suites/plugins/cos_test.py::test_cos_operational_default | 4.97 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [31mCRITICAL[0m lib389.utils:cos_test.py:27 Adding user (uid=user_0,ou=people,dc=example,dc=com): [32mINFO [0m lib389.utils:cos_test.py:153 Returned telephonenumber (exp. real): b'1234 is real' [32mINFO [0m lib389.utils:cos_test.py:154 Returned telephonenumber: 8 [32mINFO [0m lib389.utils:cos_test.py:160 Returned l (exp. real): b'here is real' [32mINFO [0m lib389.utils:cos_test.py:161 Returned l: 8 [32mINFO [0m lib389.utils:cos_test.py:170 Returned seealso (exp. virtual): b'dc=virtual,dc=example,dc=com' [32mINFO [0m lib389.utils:cos_test.py:171 Returned seealso: 3 [32mINFO [0m lib389.utils:cos_test.py:180 Returned description (exp. virtual): b'desc is virtual' [32mINFO [0m lib389.utils:cos_test.py:181 Returned description: 8 [32mINFO [0m lib389.utils:cos_test.py:191 Returned title (exp. real): b'title is real' [32mINFO [0m lib389.utils:cos_test.py:212 Returned title(exp. virt): b'title is virtual 1' [32mINFO [0m lib389.utils:cos_test.py:212 Returned title(exp. virt): b'title is virtual 0' | |||
Passed | suites/plugins/deref_aci_test.py::test_deref_and_access_control | 0.30 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.plugins.deref_aci_test:deref_aci_test.py:133 Check, that the dereference search result does not have userpassword | |||
Passed | suites/plugins/dna_test.py::test_dnatype_only_valid | 4.94 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/plugins/entryusn_test.py::test_entryusn_no_duplicates | 6.30 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/plugins/entryusn_test.py::test_entryusn_is_same_after_failure | 5.88 | |
No log output captured. | |||
Passed | suites/plugins/entryusn_test.py::test_entryusn_after_repl_delete | 5.23 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect fa3c68c6-59a7-4499-bc50-7684d8adcaed / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 
Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 97b3fe1c-0329-4a6d-b55d-d1a79c839091 / got description=fa3c68c6-59a7-4499-bc50-7684d8adcaed) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists | |||
Passed | suites/plugins/managed_entry_test.py::test_binddn_tracking | 2.34 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/plugins/managed_entry_test.py::test_mentry01 | 10.76 | |
No log output captured. | |||
Passed | suites/plugins/managed_entry_test.py::test_managed_entry_removal | 4.38 | |
No log output captured. | |||
Passed | suites/plugins/memberof_test.py::test_betxnpostoperation_replace | 4.86 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/plugins/memberof_test.py::test_memberofgroupattr_add | 0.03 | |
No log output captured. | |||
Passed | suites/plugins/memberof_test.py::test_enable | 4.60 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:181 Enable MemberOf plugin | |||
Passed | suites/plugins/memberof_test.py::test_member_add | 0.63 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:67 Create user uid=user_memofenh1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:67 Create user uid=user_memofenh2,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:211 Update b'uid=user_memofenh1,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp1,ou=groups,dc=example,dc=com' (member) [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:212 Update b'uid=user_memofenh2,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp1,ou=groups,dc=example,dc=com' (uniqueMember) [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:215 Update b'uid=user_memofenh1,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp2,ou=groups,dc=example,dc=com' (member) [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:216 Update b'uid=user_memofenh2,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp2,ou=groups,dc=example,dc=com' (uniqueMember) [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified | |||
Passed | suites/plugins/memberof_test.py::test_member_delete_gr1 | 0.01 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:246 Update b'uid=user_memofenh1,ou=people,dc=example,dc=com' is no longer memberof b'cn=group_memofegrp1,ou=groups,dc=example,dc=com' (member) [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified | |||
Passed | suites/plugins/memberof_test.py::test_member_delete_gr2 | 0.02 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:278 Update b'uid=user_memofenh1,ou=people,dc=example,dc=com' is no longer memberof b'cn=group_memofegrp1,ou=groups,dc=example,dc=com' (uniqueMember) [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com | |||
Passed | suites/plugins/memberof_test.py::test_member_delete_all | 0.02 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:310 Update b'uid=user_memofenh2,ou=people,dc=example,dc=com' is no longer memberof b'cn=group_memofegrp1,ou=groups,dc=example,dc=com' (uniqueMember) [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:314 Update b'uid=user_memofenh1,ou=people,dc=example,dc=com' is no longer memberof b'cn=group_memofegrp2,ou=groups,dc=example,dc=com' (member) [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' | |||
Passed | suites/plugins/memberof_test.py::test_member_after_restart | 8.93 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:349 Update b'uid=user_memofenh1,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp1,ou=groups,dc=example,dc=com' (member) [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:353 Update b'uid=user_memofenh2,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp2,ou=groups,dc=example,dc=com' (uniqueMember) [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:364 Remove uniqueMember as a memberofgrpattr [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:371 Assert that this change of configuration did change the already set values [32mINFO 
[0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified | |||
Passed | suites/plugins/memberof_test.py::test_memberofgroupattr_uid | 0.00 | |
-------------------------------Captured log call-------------------------------- [31m[1mERROR [0m tests.suites.plugins.memberof_test:memberof_test.py:400 Setting 'memberUid' as memberofgroupattr is rejected (expected) | |||
Passed | suites/plugins/memberof_test.py::test_member_add_duplicate_usr1 | 0.16 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:422 Try b'uid=user_memofenh1,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp1,ou=groups,dc=example,dc=com' (member) [31m[1mERROR [0m tests.suites.plugins.memberof_test:memberof_test.py:429 b'uid=user_memofenh1,ou=people,dc=example,dc=com' already member of b'cn=group_memofegrp1,ou=groups,dc=example,dc=com' --> fail (expected) | |||
Passed | suites/plugins/memberof_test.py::test_member_add_duplicate_usr2 | 0.78 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:450 Check initial status [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:460 Try b'uid=user_memofenh2,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp2,ou=groups,dc=example,dc=com' (member) [31m[1mERROR [0m tests.suites.plugins.memberof_test:memberof_test.py:467 b'uid=user_memofenh2,ou=people,dc=example,dc=com' already member of b'cn=group_memofegrp2,ou=groups,dc=example,dc=com' --> fail (expected) [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:470 Check final status [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup 
memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified | |||
Passed | suites/plugins/memberof_test.py::test_member_uniquemember_same_user | 0.32 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:557 Check initial status [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:569 Update b'uid=user_memofenh1,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp3,ou=groups,dc=example,dc=com' (member) [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:570 Update b'uid=user_memofenh1,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp3,ou=groups,dc=example,dc=com' (uniqueMember) [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:582 Update b'uid=user_memofenh1,ou=people,dc=example,dc=com' is not memberof b'cn=group_memofegrp3,ou=groups,dc=example,dc=com' (member) [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:586 Update b'uid=user_memofenh2,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp3,ou=groups,dc=example,dc=com' (member) [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO 
[0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:613 Checking final status [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from 
b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified | |||
Passed | suites/plugins/memberof_test.py::test_member_not_exists | 0.04 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:671 Checking Initial status [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp015,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:691 Update b'uid=user_dummy1,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp015,ou=groups,dc=example,dc=com' (member) [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:692 Update b'uid=user_dummy2,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp015,ou=groups,dc=example,dc=com' (uniqueMember) [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com | |||
Passed | suites/plugins/memberof_test.py::test_member_not_exists_complex | 0.08 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:806 Update b'uid=user_memofenh1,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp016,ou=groups,dc=example,dc=com' (member) [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:807 Update b'uid=user_memofenh1,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp016,ou=groups,dc=example,dc=com' (uniqueMember) [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from 
b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:835 Update b'uid=user_dummy1,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp016,ou=groups,dc=example,dc=com' (member) [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:845 Update b'uid=user_dummy1,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp016,ou=groups,dc=example,dc=com' (uniqueMember) [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 
memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com | |||
Passed | suites/plugins/memberof_test.py::test_complex_group_scenario_1 | 0.10 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:67 Create user uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:67 Create user uid=user_memofuser2,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:67 Create user uid=user_memofuser3,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp017,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:1011 Update b'uid=user_memofuser1,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp017,ou=groups,dc=example,dc=com' (member) [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:1012 Update b'uid=user_memofuser2,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp017,ou=groups,dc=example,dc=com' (uniqueMember) [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:1013 Update b'uid=user_memofuser3,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp017,ou=groups,dc=example,dc=com' (memberuid) [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' | |||
Passed | suites/plugins/memberof_test.py::test_complex_group_scenario_2 | 0.14 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com [32mINFO 
[0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp018,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:1261 Update b'uid=user_memofuser1,ou=people,dc=example,dc=com' is 
memberof b'cn=group_memofegrp017,ou=groups,dc=example,dc=com' (member) [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:1262 Update b'uid=user_memofuser1,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp017,ou=groups,dc=example,dc=com' (uniqueMember) [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:1263 Update b'uid=user_memofuser1,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp017,ou=groups,dc=example,dc=com' (memberuid) [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp018,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp018,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp018,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp018,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp018,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp018,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:1283 Update b'uid=user_memofuser1,ou=people,dc=example,dc=com' is no longer memberof b'cn=group_memofegrp018,ou=groups,dc=example,dc=com' (member) [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:1284 Update b'uid=user_memofuser1,ou=people,dc=example,dc=com' is no longer memberof b'cn=group_memofegrp018,ou=groups,dc=example,dc=com' (uniqueMember) [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com | |||
Passed | suites/plugins/memberof_test.py::test_complex_group_scenario_3 | 0.31 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from 
b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:67 Create user uid=user_memofuser2,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:67 Create user uid=user_memofuser3,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp019_2,ou=groups,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp019_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp019_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_1,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_1,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_1,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_1,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_1,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_1,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_1,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_1,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_1,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from 
b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_2,ou=groups,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_3,ou=groups,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership 
verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com | |||
Passed | suites/plugins/memberof_test.py::test_complex_group_scenario_4 | 0.29 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from 
b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:67 Create user uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified | |||
Passed | suites/plugins/memberof_test.py::test_complex_group_scenario_5 | 0.22 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from 
b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:67 Create user uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:67 Create user uid=user_memofuser2,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:67 Create user uid=user_memofuser3,ou=people,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:67 Create user uid=user_memofuser4,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from 
b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser2,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser3,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.uniqueMember = 
b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser4,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 
member: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from 
b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO 
[0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof 
from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com | |||
Passed | suites/plugins/memberof_test.py::test_complex_group_scenario_6 | 5.30 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from 
b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 
memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser2,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser3,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser4,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: 
uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: 
cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser2,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser2,ou=people,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser3,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser3,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser4,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser4,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com 
[32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup 
memberof from b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com 
[32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 
memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com 
[32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from 
b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com | |||
Passed | suites/plugins/memberof_test.py::test_complex_group_scenario_7 | 0.17 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com 
[32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com 
[32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com 
[32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from 
b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from 
b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser2,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 
member: uid=user_memofuser2,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser3,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser3,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser4,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser4,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = 
b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO 
[0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser2,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser2,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser3,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: 
uid=user_memofuser3,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser4,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser4,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser2,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser2,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser3,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 
Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser3,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser4,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser4,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup 
memberof from b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from 
b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com 
[32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from 
b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified | |||
Passed | suites/plugins/memberof_test.py::test_complex_group_scenario_8 | 0.11 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser2,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' 
[32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser2,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser3,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser3,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser4,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser4,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.uniqueMember = 
b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser2,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser2,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser3,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser3,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser4,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser4,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO 
[0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from 
b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check 
b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check 
b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser2,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser2,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser3,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser3,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: 
uid=user_memofuser4,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser4,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser2,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser2,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO 
[0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser3,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser3,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser4,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser4,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com 
[32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com 
[32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com 
[32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified | |||
Passed | suites/plugins/memberof_test.py::test_complex_group_scenario_9 | 0.22 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser2,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' 
[32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser2,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser3,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser3,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser4,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser4,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.uniqueMember = 
b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser2,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser2,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser3,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser3,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser4,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser4,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com 
[32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from 
b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com 
[32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:101 
member: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.uniqueMember = 
b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m 
tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from 
b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' | |||
Passed | suites/plugins/memberof_test.py::test_memberof_auto_add_oc | 0.23 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from uid=user1,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group,dc=example,dc=com [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:2753 Correctly rejected invalid objectclass [32mINFO [0m tests.suites.plugins.memberof_test:memberof_test.py:2823 Test complete. | |||
Passed | suites/plugins/pluginpath_validation_test.py::test_pluginpath_validation | 0.31 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.plugins.pluginpath_validation_test:pluginpath_validation_test.py:103 Test complete | |||
Passed | suites/plugins/referint_test.py::test_referential_false_failure | 7.37 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo0,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo1,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo2,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo3,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo4,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo5,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo6,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo7,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo8,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo9,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo10,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo11,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user 
(uid=foo\,oo12,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo13,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo14,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo15,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo16,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo17,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo18,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo19,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo20,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo21,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo22,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo23,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo24,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo25,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo26,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo27,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo28,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 
Adding user (uid=foo\,oo29,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo30,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo31,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo32,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo33,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo34,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo35,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo36,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo37,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo38,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo39,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo40,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo41,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo42,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo43,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo44,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo45,dc=example,dc=com): [31mCRITICAL[0m 
tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo46,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo47,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo48,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo49,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo50,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo51,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo52,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo53,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo54,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo55,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo56,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo57,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo58,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo59,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo60,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo61,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo62,dc=example,dc=com): 
[31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo63,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo64,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo65,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo66,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo67,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo68,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo69,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo70,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo71,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo72,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo73,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo74,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo75,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo76,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo77,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo78,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user 
(uid=foo\,oo79,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo80,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo81,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo82,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo83,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo84,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo85,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo86,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo87,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo88,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo89,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo90,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo91,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo92,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo93,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo94,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo95,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 
Adding user (uid=foo\,oo96,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo97,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo98,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo99,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo100,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo101,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo102,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo103,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo104,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo105,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo106,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo107,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo108,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo109,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo110,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo111,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo112,dc=example,dc=com): [31mCRITICAL[0m 
tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo113,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo114,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo115,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo116,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo117,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo118,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo119,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo120,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo121,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo122,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo123,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo124,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo125,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo126,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo127,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo128,dc=example,dc=com): [31mCRITICAL[0m tests.suites.plugins.referint_test:referint_test.py:37 Adding user 
(uid=foo\,oo129,dc=example,dc=com): | |||
Passed | suites/plugins/rootdn_plugin_test.py::test_rootdn_access_specific_time | 0.31 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:60 Initializing root DN test suite... [32mINFO [0m tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:70 test_rootdn_init: Initialized root DN test suite. [32mINFO [0m tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:36 Cleaning up the config area -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:102 Running test_rootdn_access_specific_time... | |||
Passed | suites/plugins/rootdn_plugin_test.py::test_rootdn_access_day_of_week | 2.61 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:36 Cleaning up the config area -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:185 Running test_rootdn_access_day_of_week... [32mINFO [0m tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:202 Today: Tue [32mINFO [0m tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:203 Allowed days: Tue,Wed [32mINFO [0m tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:204 Deny days: Thu,Fri | |||
Passed | suites/plugins/rootdn_plugin_test.py::test_rootdn_access_denied_ip | 2.60 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:36 Cleaning up the config area -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:268 Running test_rootdn_access_denied_ip... | |||
Passed | suites/plugins/rootdn_plugin_test.py::test_rootdn_access_denied_host | 2.63 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:36 Cleaning up the config area -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:333 Running test_rootdn_access_denied_host... | |||
Passed | suites/plugins/rootdn_plugin_test.py::test_rootdn_access_allowed_ip | 2.60 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:36 Cleaning up the config area -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:400 Running test_rootdn_access_allowed_ip... | |||
Passed | suites/plugins/rootdn_plugin_test.py::test_rootdn_access_allowed_host | 2.64 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:36 Cleaning up the config area -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:466 Running test_rootdn_access_allowed_host... | |||
Passed | suites/plugins/rootdn_plugin_test.py::test_rootdn_config_validate | 0.02 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:36 Cleaning up the config area -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:575 Add just "rootdn-open-time" | |||
Passed | suites/plugins/rootdn_plugin_test.py::test_rootdn_access_denied_ip_wildcard | 2.57 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:36 Cleaning up the config area -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:666 Running test_rootdn_access_denied_ip_wildcard... | |||
Passed | suites/plugins/rootdn_plugin_test.py::test_rootdn_access_allowed_ip_wildcard | 3.12 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:36 Cleaning up the config area -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:712 Running test_rootdn_access_allowed_ip... | |||
Passed | suites/psearch/psearch_test.py::test_psearch | 2.03 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:psearch_test.py:30 dc=example,dc=com has changed! [32mINFO [0m lib389:psearch_test.py:30 ou=groups,dc=example,dc=com has changed! [32mINFO [0m lib389:psearch_test.py:30 ou=people,dc=example,dc=com has changed! [32mINFO [0m lib389:psearch_test.py:30 ou=permissions,dc=example,dc=com has changed! [32mINFO [0m lib389:psearch_test.py:30 ou=services,dc=example,dc=com has changed! [32mINFO [0m lib389:psearch_test.py:30 uid=demo_user,ou=people,dc=example,dc=com has changed! [32mINFO [0m lib389:psearch_test.py:30 cn=demo_group,ou=groups,dc=example,dc=com has changed! [32mINFO [0m lib389:psearch_test.py:30 cn=group_admin,ou=permissions,dc=example,dc=com has changed! [32mINFO [0m lib389:psearch_test.py:30 cn=group_modify,ou=permissions,dc=example,dc=com has changed! [32mINFO [0m lib389:psearch_test.py:30 cn=user_admin,ou=permissions,dc=example,dc=com has changed! [32mINFO [0m lib389:psearch_test.py:30 cn=user_modify,ou=permissions,dc=example,dc=com has changed! [32mINFO [0m lib389:psearch_test.py:30 cn=user_passwd_reset,ou=permissions,dc=example,dc=com has changed! [32mINFO [0m lib389:psearch_test.py:30 cn=user_private_read,ou=permissions,dc=example,dc=com has changed! [32mINFO [0m lib389:psearch_test.py:34 No more results [32mINFO [0m lib389:psearch_test.py:30 cn=group1,ou=groups,dc=example,dc=com has changed! [32mINFO [0m lib389:psearch_test.py:34 No more results | |||
Passed | suites/pwp_storage/storage_test.py::test_check_password_scheme[CRYPT] | 0.07 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/pwp_storage/storage_test.py::test_check_password_scheme[SHA] | 0.53 | |
No log output captured. | |||
Passed | suites/pwp_storage/storage_test.py::test_check_password_scheme[SSHA] | 0.05 | |
No log output captured. | |||
Passed | suites/pwp_storage/storage_test.py::test_check_password_scheme[SHA256] | 0.07 | |
No log output captured. | |||
Passed | suites/pwp_storage/storage_test.py::test_check_password_scheme[SSHA256] | 0.05 | |
No log output captured. | |||
Passed | suites/pwp_storage/storage_test.py::test_check_password_scheme[SHA384] | 0.06 | |
No log output captured. | |||
Passed | suites/pwp_storage/storage_test.py::test_check_password_scheme[SSHA384] | 0.05 | |
No log output captured. | |||
Passed | suites/pwp_storage/storage_test.py::test_check_password_scheme[SHA512] | 0.05 | |
No log output captured. | |||
Passed | suites/pwp_storage/storage_test.py::test_check_password_scheme[SSHA512] | 0.05 | |
No log output captured. | |||
Passed | suites/pwp_storage/storage_test.py::test_check_password_scheme[MD5] | 0.06 | |
No log output captured. | |||
Passed | suites/pwp_storage/storage_test.py::test_check_password_scheme[PBKDF2_SHA256] | 0.09 | |
No log output captured. | |||
Passed | suites/pwp_storage/storage_test.py::test_clear_scheme | 0.13 | |
No log output captured. | |||
Passed | suites/pwp_storage/storage_test.py::test_check_two_scheme | 4.39 | |
No log output captured. | |||
Passed | suites/pwp_storage/storage_test.py::test_check_pbkdf2_sha256 | 5.02 | |
No log output captured. | |||
Passed | suites/pwp_storage/storage_test.py::test_check_ssha512 | 6.08 | |
No log output captured. | |||
Passed | suites/referint_plugin/rename_test.py::test_rename_large_subtree | 253.63 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 6ed8bb91-7b2d-4749-884b-f552256f83f4 / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 
Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 4b9cb40c-d636-41d4-af8c-6c00d16563c0 / got description=6ed8bb91-7b2d-4749-884b-f552256f83f4) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 76c88f0a-0373-406c-8bc0-f81ee2504d81 / got description=4b9cb40c-d636-41d4-af8c-6c00d16563c0) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 76c88f0a-0373-406c-8bc0-f81ee2504d81 / got description=4b9cb40c-d636-41d4-af8c-6c00d16563c0) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 76c88f0a-0373-406c-8bc0-f81ee2504d81 / got description=4b9cb40c-d636-41d4-af8c-6c00d16563c0) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 76c88f0a-0373-406c-8bc0-f81ee2504d81 / got description=4b9cb40c-d636-41d4-af8c-6c00d16563c0) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 76c88f0a-0373-406c-8bc0-f81ee2504d81 / got description=4b9cb40c-d636-41d4-af8c-6c00d16563c0) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to 
ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 76c88f0a-0373-406c-8bc0-f81ee2504d81 / got description=4b9cb40c-d636-41d4-af8c-6c00d16563c0) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 76c88f0a-0373-406c-8bc0-f81ee2504d81 / got description=4b9cb40c-d636-41d4-af8c-6c00d16563c0) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 76c88f0a-0373-406c-8bc0-f81ee2504d81 / got description=4b9cb40c-d636-41d4-af8c-6c00d16563c0) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 76c88f0a-0373-406c-8bc0-f81ee2504d81 / got description=4b9cb40c-d636-41d4-af8c-6c00d16563c0) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 76c88f0a-0373-406c-8bc0-f81ee2504d81 / got description=4b9cb40c-d636-41d4-af8c-6c00d16563c0) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 76c88f0a-0373-406c-8bc0-f81ee2504d81 / got description=4b9cb40c-d636-41d4-af8c-6c00d16563c0) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 76c88f0a-0373-406c-8bc0-f81ee2504d81 / got 
description=4b9cb40c-d636-41d4-af8c-6c00d16563c0) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 76c88f0a-0373-406c-8bc0-f81ee2504d81 / got description=4b9cb40c-d636-41d4-af8c-6c00d16563c0) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 76c88f0a-0373-406c-8bc0-f81ee2504d81 / got description=4b9cb40c-d636-41d4-af8c-6c00d16563c0) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 76c88f0a-0373-406c-8bc0-f81ee2504d81 / got description=4b9cb40c-d636-41d4-af8c-6c00d16563c0) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 76c88f0a-0373-406c-8bc0-f81ee2504d81 / got description=4b9cb40c-d636-41d4-af8c-6c00d16563c0) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 76c88f0a-0373-406c-8bc0-f81ee2504d81 / got description=4b9cb40c-d636-41d4-af8c-6c00d16563c0) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 76c88f0a-0373-406c-8bc0-f81ee2504d81 / got description=4b9cb40c-d636-41d4-af8c-6c00d16563c0) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from 
ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 76c88f0a-0373-406c-8bc0-f81ee2504d81 / got description=4b9cb40c-d636-41d4-af8c-6c00d16563c0) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 76c88f0a-0373-406c-8bc0-f81ee2504d81 / got description=4b9cb40c-d636-41d4-af8c-6c00d16563c0) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 76c88f0a-0373-406c-8bc0-f81ee2504d81 / got description=4b9cb40c-d636-41d4-af8c-6c00d16563c0) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 76c88f0a-0373-406c-8bc0-f81ee2504d81 / got description=4b9cb40c-d636-41d4-af8c-6c00d16563c0) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 76c88f0a-0373-406c-8bc0-f81ee2504d81 / got description=4b9cb40c-d636-41d4-af8c-6c00d16563c0) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 76c88f0a-0373-406c-8bc0-f81ee2504d81 / got description=4b9cb40c-d636-41d4-af8c-6c00d16563c0) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT 
working (expect 76c88f0a-0373-406c-8bc0-f81ee2504d81 / got description=4b9cb40c-d636-41d4-af8c-6c00d16563c0) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 76c88f0a-0373-406c-8bc0-f81ee2504d81 / got description=4b9cb40c-d636-41d4-af8c-6c00d16563c0) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 76c88f0a-0373-406c-8bc0-f81ee2504d81 / got description=4b9cb40c-d636-41d4-af8c-6c00d16563c0) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 76c88f0a-0373-406c-8bc0-f81ee2504d81 / got description=4b9cb40c-d636-41d4-af8c-6c00d16563c0) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 76c88f0a-0373-406c-8bc0-f81ee2504d81 / got description=4b9cb40c-d636-41d4-af8c-6c00d16563c0) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 76c88f0a-0373-406c-8bc0-f81ee2504d81 / got description=4b9cb40c-d636-41d4-af8c-6c00d16563c0) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 76c88f0a-0373-406c-8bc0-f81ee2504d81 / got description=4b9cb40c-d636-41d4-af8c-6c00d16563c0) [32mINFO [0m 
lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 76c88f0a-0373-406c-8bc0-f81ee2504d81 / got description=4b9cb40c-d636-41d4-af8c-6c00d16563c0) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 76c88f0a-0373-406c-8bc0-f81ee2504d81 / got description=4b9cb40c-d636-41d4-af8c-6c00d16563c0) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 76c88f0a-0373-406c-8bc0-f81ee2504d81 / got description=4b9cb40c-d636-41d4-af8c-6c00d16563c0) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0961d2c0-86e4-4f85-a8c3-fb9a9cbd155c / got description=76c88f0a-0373-406c-8bc0-f81ee2504d81) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0961d2c0-86e4-4f85-a8c3-fb9a9cbd155c / got description=76c88f0a-0373-406c-8bc0-f81ee2504d81) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 
0961d2c0-86e4-4f85-a8c3-fb9a9cbd155c / got description=76c88f0a-0373-406c-8bc0-f81ee2504d81) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0961d2c0-86e4-4f85-a8c3-fb9a9cbd155c / got description=76c88f0a-0373-406c-8bc0-f81ee2504d81) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0961d2c0-86e4-4f85-a8c3-fb9a9cbd155c / got description=76c88f0a-0373-406c-8bc0-f81ee2504d81) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0961d2c0-86e4-4f85-a8c3-fb9a9cbd155c / got description=76c88f0a-0373-406c-8bc0-f81ee2504d81) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0961d2c0-86e4-4f85-a8c3-fb9a9cbd155c / got description=76c88f0a-0373-406c-8bc0-f81ee2504d81) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0961d2c0-86e4-4f85-a8c3-fb9a9cbd155c / got description=76c88f0a-0373-406c-8bc0-f81ee2504d81) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0961d2c0-86e4-4f85-a8c3-fb9a9cbd155c / got description=76c88f0a-0373-406c-8bc0-f81ee2504d81) [32mINFO [0m 
lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0961d2c0-86e4-4f85-a8c3-fb9a9cbd155c / got description=76c88f0a-0373-406c-8bc0-f81ee2504d81) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0961d2c0-86e4-4f85-a8c3-fb9a9cbd155c / got description=76c88f0a-0373-406c-8bc0-f81ee2504d81) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0961d2c0-86e4-4f85-a8c3-fb9a9cbd155c / got description=76c88f0a-0373-406c-8bc0-f81ee2504d81) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0961d2c0-86e4-4f85-a8c3-fb9a9cbd155c / got description=76c88f0a-0373-406c-8bc0-f81ee2504d81) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0961d2c0-86e4-4f85-a8c3-fb9a9cbd155c / got description=76c88f0a-0373-406c-8bc0-f81ee2504d81) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0961d2c0-86e4-4f85-a8c3-fb9a9cbd155c / got description=76c88f0a-0373-406c-8bc0-f81ee2504d81) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to 
ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0961d2c0-86e4-4f85-a8c3-fb9a9cbd155c / got description=76c88f0a-0373-406c-8bc0-f81ee2504d81) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0961d2c0-86e4-4f85-a8c3-fb9a9cbd155c / got description=76c88f0a-0373-406c-8bc0-f81ee2504d81) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0961d2c0-86e4-4f85-a8c3-fb9a9cbd155c / got description=76c88f0a-0373-406c-8bc0-f81ee2504d81) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0961d2c0-86e4-4f85-a8c3-fb9a9cbd155c / got description=76c88f0a-0373-406c-8bc0-f81ee2504d81) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0961d2c0-86e4-4f85-a8c3-fb9a9cbd155c / got description=76c88f0a-0373-406c-8bc0-f81ee2504d81) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0961d2c0-86e4-4f85-a8c3-fb9a9cbd155c / got description=76c88f0a-0373-406c-8bc0-f81ee2504d81) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working | |||
Passed | suites/replication/acceptance_test.py::test_add_entry | 10.03 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master3 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39003, 'ldap-secureport': 63703, 'server-id': 'master3', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master4 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39004, 'ldap-secureport': 63704, 'server-id': 'master4', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect a899a139-27ed-4d16-9825-3c85672d36a6 / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect ea65b550-8592-41c4-b27f-901a360801e2 / got description=a899a139-27ed-4d16-9825-3c85672d36a6) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master3 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect f32f2ac0-6e93-483f-a6fa-246d0b84731f / got description=ea65b550-8592-41c4-b27f-901a360801e2) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect ae9bd1c8-3115-41cb-94be-191ec990cd6d / got description=f32f2ac0-6e93-483f-a6fa-246d0b84731f) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master4 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 5844dddd-8820-4ffc-a9ac-db1d95c1ceae / got description=ae9bd1c8-3115-41cb-94be-191ec990cd6d) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect c2380069-781d-481f-822a-d8c2f9bc96e7 / got description=5844dddd-8820-4ffc-a9ac-db1d95c1ceae) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... 
[32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master3 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master4 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master3 ... [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is was created [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master4 ... [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is was created [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master3 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master3 to master2 ... [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master3 to master4 ... [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is was created [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master4 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master4 to master2 ... [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master4 to master3 ... [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is was created [32mINFO [0m tests.suites.replication.acceptance_test:acceptance_test.py:41 Adding entry uid=mmrepl_test,dc=example,dc=com | |||
Passed | suites/replication/acceptance_test.py::test_modify_entry | 3.10 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m tests.suites.replication.acceptance_test:acceptance_test.py:41 Adding entry uid=mmrepl_test,dc=example,dc=com [32mINFO [0m tests.suites.replication.acceptance_test:acceptance_test.py:45 Deleting entry uid=mmrepl_test,dc=example,dc=com -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.replication.acceptance_test:acceptance_test.py:128 Modifying entry uid=mmrepl_test,dc=example,dc=com - add operation [32mINFO [0m tests.suites.replication.acceptance_test:acceptance_test.py:138 Modifying entry uid=mmrepl_test,dc=example,dc=com - replace operation [32mINFO [0m tests.suites.replication.acceptance_test:acceptance_test.py:146 Modifying entry uid=mmrepl_test,dc=example,dc=com - delete operation | |||
Passed | suites/replication/acceptance_test.py::test_delete_entry | 10.05 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m tests.suites.replication.acceptance_test:acceptance_test.py:41 Adding entry uid=mmrepl_test,dc=example,dc=com [32mINFO [0m tests.suites.replication.acceptance_test:acceptance_test.py:45 Deleting entry uid=mmrepl_test,dc=example,dc=com -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.replication.acceptance_test:acceptance_test.py:168 Deleting entry uid=mmrepl_test,dc=example,dc=com during the test | |||
Passed | suites/replication/acceptance_test.py::test_modrdn_entry[0] | 20.12 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m tests.suites.replication.acceptance_test:acceptance_test.py:41 Adding entry uid=mmrepl_test,dc=example,dc=com -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.replication.acceptance_test:acceptance_test.py:192 Modify entry RDN uid=mmrepl_test,dc=example,dc=com [32mINFO [0m tests.suites.replication.acceptance_test:acceptance_test.py:211 Remove entry with new RDN uid=newrdn,dc=example,dc=com | |||
Passed | suites/replication/acceptance_test.py::test_modrdn_entry[1] | 20.08 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m tests.suites.replication.acceptance_test:acceptance_test.py:41 Adding entry uid=mmrepl_test,dc=example,dc=com -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.replication.acceptance_test:acceptance_test.py:192 Modify entry RDN uid=mmrepl_test,dc=example,dc=com [32mINFO [0m tests.suites.replication.acceptance_test:acceptance_test.py:211 Remove entry with new RDN uid=newrdn,dc=example,dc=com | |||
Passed | suites/replication/acceptance_test.py::test_modrdn_after_pause | 14.68 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.replication.acceptance_test:acceptance_test.py:237 Adding entry uid=mmrepl_test,dc=example,dc=com [32mINFO [0m tests.suites.replication.acceptance_test:acceptance_test.py:252 Pause all replicas [32mINFO [0m tests.suites.replication.acceptance_test:acceptance_test.py:255 Modify entry RDN uid=mmrepl_test,dc=example,dc=com [32mINFO [0m tests.suites.replication.acceptance_test:acceptance_test.py:263 Resume all replicas [32mINFO [0m tests.suites.replication.acceptance_test:acceptance_test.py:266 Wait for replication to happen [32mINFO [0m tests.suites.replication.acceptance_test:acceptance_test.py:273 Remove entry with new RDN uid=newrdn,dc=example,dc=com | |||
Passed | suites/replication/acceptance_test.py::test_modify_stripattrs | 0.03 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.replication.acceptance_test:acceptance_test.py:295 Modify nsds5replicastripattrs with b'modifiersname modifytimestamp' [32mINFO [0m tests.suites.replication.acceptance_test:acceptance_test.py:298 Check nsds5replicastripattrs for b'modifiersname modifytimestamp' | |||
Passed | suites/replication/acceptance_test.py::test_new_suffix | 11.10 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m tests.suites.replication.acceptance_test:acceptance_test.py:62 Adding suffix:o=test_repl and backend: repl_base to master1 [32mINFO [0m lib389:backend.py:80 List backend with suffix=o=test_repl [32mINFO [0m lib389:backend.py:290 Creating a local backend [32mINFO [0m lib389:backend.py:76 List backend cn=repl_base,cn=ldbm database,cn=plugins,cn=config [32mINFO [0m lib389:__init__.py:1713 Found entry dn: cn=repl_base,cn=ldbm database,cn=plugins,cn=config cn: repl_base nsslapd-cachememsize: 512000 nsslapd-cachesize: -1 nsslapd-directory: /var/lib/dirsrv/slapd-master1/db/repl_base nsslapd-dncachememsize: 16777216 nsslapd-readonly: off nsslapd-require-index: off nsslapd-require-internalop-index: off nsslapd-suffix: o=test_repl objectClass: top objectClass: extensibleObject objectClass: nsBackendInstance [32mINFO [0m lib389:mappingTree.py:154 Entry dn: cn="o=test_repl",cn=mapping tree,cn=config cn: o=test_repl nsslapd-backend: repl_base nsslapd-state: backend objectclass: top objectclass: extensibleObject objectclass: nsMappingTree [32mINFO [0m lib389:__init__.py:1713 Found entry dn: cn=o\3Dtest_repl,cn=mapping tree,cn=config cn: o=test_repl nsslapd-backend: repl_base nsslapd-state: backend objectClass: top objectClass: extensibleObject objectClass: nsMappingTree [32mINFO [0m tests.suites.replication.acceptance_test:acceptance_test.py:62 Adding suffix:o=test_repl and backend: repl_base to master2 [32mINFO [0m lib389:backend.py:80 List backend with suffix=o=test_repl [32mINFO [0m lib389:backend.py:290 Creating a local backend [32mINFO [0m lib389:backend.py:76 List backend cn=repl_base,cn=ldbm database,cn=plugins,cn=config [32mINFO [0m lib389:__init__.py:1713 Found entry dn: cn=repl_base,cn=ldbm database,cn=plugins,cn=config cn: repl_base nsslapd-cachememsize: 512000 nsslapd-cachesize: -1 nsslapd-directory: /var/lib/dirsrv/slapd-master2/db/repl_base nsslapd-dncachememsize: 16777216 
nsslapd-readonly: off nsslapd-require-index: off nsslapd-require-internalop-index: off nsslapd-suffix: o=test_repl objectClass: top objectClass: extensibleObject objectClass: nsBackendInstance [32mINFO [0m lib389:mappingTree.py:154 Entry dn: cn="o=test_repl",cn=mapping tree,cn=config cn: o=test_repl nsslapd-backend: repl_base nsslapd-state: backend objectclass: top objectclass: extensibleObject objectclass: nsMappingTree [32mINFO [0m lib389:__init__.py:1713 Found entry dn: cn=o\3Dtest_repl,cn=mapping tree,cn=config cn: o=test_repl nsslapd-backend: repl_base nsslapd-state: backend objectClass: top objectClass: extensibleObject objectClass: nsMappingTree [32mINFO [0m tests.suites.replication.acceptance_test:acceptance_test.py:62 Adding suffix:o=test_repl and backend: repl_base to master3 [32mINFO [0m lib389:backend.py:80 List backend with suffix=o=test_repl [32mINFO [0m lib389:backend.py:290 Creating a local backend [32mINFO [0m lib389:backend.py:76 List backend cn=repl_base,cn=ldbm database,cn=plugins,cn=config [32mINFO [0m lib389:__init__.py:1713 Found entry dn: cn=repl_base,cn=ldbm database,cn=plugins,cn=config cn: repl_base nsslapd-cachememsize: 512000 nsslapd-cachesize: -1 nsslapd-directory: /var/lib/dirsrv/slapd-master3/db/repl_base nsslapd-dncachememsize: 16777216 nsslapd-readonly: off nsslapd-require-index: off nsslapd-require-internalop-index: off nsslapd-suffix: o=test_repl objectClass: top objectClass: extensibleObject objectClass: nsBackendInstance [32mINFO [0m lib389:mappingTree.py:154 Entry dn: cn="o=test_repl",cn=mapping tree,cn=config cn: o=test_repl nsslapd-backend: repl_base nsslapd-state: backend objectclass: top objectclass: extensibleObject objectclass: nsMappingTree [32mINFO [0m lib389:__init__.py:1713 Found entry dn: cn=o\3Dtest_repl,cn=mapping tree,cn=config cn: o=test_repl nsslapd-backend: repl_base nsslapd-state: backend objectClass: top objectClass: extensibleObject objectClass: nsMappingTree [32mINFO [0m 
tests.suites.replication.acceptance_test:acceptance_test.py:62 Adding suffix:o=test_repl and backend: repl_base to master4 [32mINFO [0m lib389:backend.py:80 List backend with suffix=o=test_repl [32mINFO [0m lib389:backend.py:290 Creating a local backend [32mINFO [0m lib389:backend.py:76 List backend cn=repl_base,cn=ldbm database,cn=plugins,cn=config [32mINFO [0m lib389:__init__.py:1713 Found entry dn: cn=repl_base,cn=ldbm database,cn=plugins,cn=config cn: repl_base nsslapd-cachememsize: 512000 nsslapd-cachesize: -1 nsslapd-directory: /var/lib/dirsrv/slapd-master4/db/repl_base nsslapd-dncachememsize: 16777216 nsslapd-readonly: off nsslapd-require-index: off nsslapd-require-internalop-index: off nsslapd-suffix: o=test_repl objectClass: top objectClass: extensibleObject objectClass: nsBackendInstance [32mINFO [0m lib389:mappingTree.py:154 Entry dn: cn="o=test_repl",cn=mapping tree,cn=config cn: o=test_repl nsslapd-backend: repl_base nsslapd-state: backend objectclass: top objectclass: extensibleObject objectclass: nsMappingTree [32mINFO [0m lib389:__init__.py:1713 Found entry dn: cn=o\3Dtest_repl,cn=mapping tree,cn=config cn: o=test_repl nsslapd-backend: repl_base nsslapd-state: backend objectClass: top objectClass: extensibleObject objectClass: nsMappingTree -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from 
ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect ccb82a41-7397-43db-9a1a-7b640ae82140 / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect c7d5608e-c543-442c-9d03-7d7d85c749f4 / got description=ccb82a41-7397-43db-9a1a-7b640ae82140) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 37a19280-d8d3-434a-b520-acb31150f382 / got description=c7d5608e-c543-442c-9d03-7d7d85c749f4) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect c2f11582-3397-4d18-8299-05126b77ab08 / got description=37a19280-d8d3-434a-b520-acb31150f382) [32mINFO [0m 
lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working | |||
Passed | suites/replication/acceptance_test.py::test_many_attrs | 20.27 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m tests.suites.replication.acceptance_test:acceptance_test.py:41 Adding entry uid=mmrepl_test,dc=example,dc=com -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.replication.acceptance_test:acceptance_test.py:353 Modifying entry uid=mmrepl_test,dc=example,dc=com - 10 add operations [32mINFO [0m tests.suites.replication.acceptance_test:acceptance_test.py:357 Check that everything was properly replicated after an add operation [32mINFO [0m tests.suites.replication.acceptance_test:acceptance_test.py:362 Modifying entry uid=mmrepl_test,dc=example,dc=com - 4 delete operations for [b'test0', b'test4', b'test7', b'test9'] [32mINFO [0m tests.suites.replication.acceptance_test:acceptance_test.py:366 Check that everything was properly replicated after a delete operation | |||
Passed | suites/replication/acceptance_test.py::test_double_delete | 10.06 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m tests.suites.replication.acceptance_test:acceptance_test.py:41 Adding entry uid=mmrepl_test,dc=example,dc=com [32mINFO [0m tests.suites.replication.acceptance_test:acceptance_test.py:45 Deleting entry uid=mmrepl_test,dc=example,dc=com -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.replication.acceptance_test:acceptance_test.py:386 Deleting entry uid=mmrepl_test,dc=example,dc=com from master1 [32mINFO [0m tests.suites.replication.acceptance_test:acceptance_test.py:389 Deleting entry uid=mmrepl_test,dc=example,dc=com from master2 [32mINFO [0m tests.suites.replication.acceptance_test:acceptance_test.py:393 Entry uid=mmrepl_test,dc=example,dc=com wasn't found master2. It is expected. [32mINFO [0m tests.suites.replication.acceptance_test:acceptance_test.py:395 Make searches to check if server is alive | |||
Passed | suites/replication/acceptance_test.py::test_password_repl_error | 23.28 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m tests.suites.replication.acceptance_test:acceptance_test.py:41 Adding entry uid=mmrepl_test,dc=example,dc=com -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.replication.acceptance_test:acceptance_test.py:416 Clean the error log [32mINFO [0m tests.suites.replication.acceptance_test:acceptance_test.py:419 Set replication loglevel [32mINFO [0m tests.suites.replication.acceptance_test:acceptance_test.py:422 Modifying entry uid=mmrepl_test,dc=example,dc=com - change userpassword on master 2 [32mINFO [0m tests.suites.replication.acceptance_test:acceptance_test.py:430 Restart the servers to flush the logs [32mINFO [0m tests.suites.replication.acceptance_test:acceptance_test.py:439 Check the error log for the error with uid=mmrepl_test,dc=example,dc=com | |||
Passed | suites/replication/acceptance_test.py::test_invalid_agmt | 0.06 | |
-------------------------------Captured log call-------------------------------- [31mCRITICAL[0m lib389:agreement.py:1026 Failed to add replication agreement: {'msgtype': 105, 'msgid': 5, 'result': 53, 'desc': 'Server is unwilling to perform', 'ctrls': []} [32mINFO [0m lib389:acceptance_test.py:464 Invalid repl agreement correctly rejected | |||
Passed | suites/replication/acceptance_test.py::test_warining_for_invalid_replica | 0.04 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.replication.acceptance_test:acceptance_test.py:494 Set nsds5ReplicaBackoffMin to 20 [32mINFO [0m tests.suites.replication.acceptance_test:acceptance_test.py:497 Set nsds5ReplicaBackoffMax to 10 [32mINFO [0m tests.suites.replication.acceptance_test:acceptance_test.py:499 Resetting configuration: nsds5ReplicaBackoffMin [32mINFO [0m tests.suites.replication.acceptance_test:acceptance_test.py:501 Check the error log for the error | |||
Passed | suites/replication/acceptance_test.py::test_csngen_task | 11.02 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect dd99fa8a-85fb-4fe3-b6fa-1c3e2b7b437f / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 
Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect cf5d8625-f87d-4eac-a7fa-be8d6a13e79b / got description=dd99fa8a-85fb-4fe3-b6fa-1c3e2b7b437f) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.replication.acceptance_test:acceptance_test.py:523 Check the error log contains strings showing csn generator is tested | |||
Passed | suites/replication/acceptance_test.py::test_csnpurge_large_valueset | 6.63 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.replication.acceptance_test:acceptance_test.py:574 nsds5ReplicaPurgeDelay to 5 | |||
Passed | suites/replication/acceptance_test.py::test_urp_trigger_substring_search | 609.14 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.replication.acceptance_test:acceptance_test.py:604 Set nsslapd-plugin-logging to on [32mINFO [0m tests.suites.replication.acceptance_test:acceptance_test.py:634 Entry not yet replicated on M2, wait a bit [32mINFO [0m tests.suites.replication.acceptance_test:acceptance_test.py:638 Check that on M2, URP as not triggered such internal search [32mINFO [0m tests.suites.replication.acceptance_test:acceptance_test.py:641 found line: [] | |||
Passed | suites/replication/cascading_test.py::test_basic_with_hub | 18.96 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for hub1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39101, 'ldap-secureport': 63801, 'server-id': 'hub1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for consumer1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39201, 'ldap-secureport': 63901, 'server-id': 'consumer1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:524 Creating replication topology. 
[32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39101 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39101 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39101 is NOT working (expect a5d19d91-fbe6-459a-ad7e-12e9fead26c8 / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39101 is working [32mINFO [0m lib389.replica:replica.py:2211 SUCCESS: joined consumer from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39101 [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39101 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is was created [32mINFO [0m lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39101 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 7883da2f-42a1-4b8c-b98f-1da1f21accb5 / got description=a5d19d91-fbe6-459a-ad7e-12e9fead26c8) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from 
ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is working -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.replication.cascading_test:cascading_test.py:45 update cn=101,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config to add nsDS5ReplicatedAttributeListTotal [32mINFO [0m tests.suites.replication.cascading_test:cascading_test.py:45 update cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config to add nsDS5ReplicatedAttributeListTotal [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect abab6b4d-8d66-4c07-8cac-265213dfd83c / got description=7883da2f-42a1-4b8c-b98f-1da1f21accb5) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 15399a93-c2fc-47d7-8911-69536d9dd28c / got description=abab6b4d-8d66-4c07-8cac-265213dfd83c) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 60a0d921-2fb7-4561-83bb-414d2c53a376 / got description=15399a93-c2fc-47d7-8911-69536d9dd28c) [32mINFO [0m lib389.replica:replica.py:2496 
SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is working | |||
Passed | suites/replication/changelog_encryption_test.py::test_cl_encryption_setup_process | 36.33 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for consumer1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39201, 'ldap-secureport': 63901, 'server-id': 'consumer1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:169 Joining consumer consumer1 from master1 ... [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 5f30a5c1-e7b3-4b0d-a909-8a6fb0bead25 / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is working [32mINFO [0m lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 [32mINFO [0m 
lib389.topologies:topologies.py:174 Ensuring consumer consumer1 from master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 already exists -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.replication.changelog_encryption_test:changelog_encryption_test.py:43 Enable TLS ... [32mINFO [0m tests.suites.replication.changelog_encryption_test:changelog_encryption_test.py:48 Export changelog ... [32mINFO [0m tests.suites.replication.changelog_encryption_test:changelog_encryption_test.py:55 Enable changelog encryption ... [32mINFO [0m tests.suites.replication.changelog_encryption_test:changelog_encryption_test.py:68 Import changelog ... [32mINFO [0m tests.suites.replication.changelog_encryption_test:changelog_encryption_test.py:73 Test replication is still working ... | |||
Passed | suites/replication/changelog_test.py::test_dsconf_dump_changelog_files_removed | 6.15 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 51683165-690d-4055-8a51-8e03f8b1a86b / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 
Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 41c08599-a57c-4ab9-a982-235e9ef35820 / got description=51683165-690d-4055-8a51-8e03f8b1a86b) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:222 Remove .ldif files, if present in: /var/lib/dirsrv/slapd-master1/ldif [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:222 Remove .done files, if present in: /var/lib/dirsrv/slapd-master1/ldif [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:52 Adding user replusr [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:65 Modify RDN of user uid=replusr,ou=People,dc=example,dc=com [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:72 Deleting user: uid=cl5usr,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:386 Use dsconf export-changelog with invalid parameters [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:388 Command used : ['/usr/sbin/dsconf', 'ldap://LOCALHOST:39001', '-D', 'cn=Directory Manager', '-w', 'badpasswd', 'replication', 'export-changelog'] [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:391 output message : b'No action provided, here is some --help.\nusage: dsconf [-h] [-v] [-D BINDDN] [-w BINDPW] [-W] [-y PWDFILE] [-b BASEDN]\n [-Z] [-j]\n instance\n {backend,backup,chaining,config,directory_manager,monitor,plugin,pwpolicy,localpwp,replication,repl-agmt,repl-winsync-agmt,repl-tasks,sasl,security,schema,repl-conflict}\n ...\n\npositional arguments:\n instance The instance name OR the LDAP url to connect to, IE\n localhost, ldap://mai.example.com:389\n 
{backend,backup,chaining,config,directory_manager,monitor,plugin,pwpolicy,localpwp,replication,repl-agmt,repl-winsync-agmt,repl-tasks,sasl,security,schema,repl-conflict}\n resources to act upon\n backend Manage database suffixes and backends\n backup Manage online backups\n chaining Manage database chaining/database links\n config Manage server configuration\n directory_manager Manage the directory manager account\n monitor Monitor the state of the instance\n plugin Manage plugins available on the server\n pwpolicy Get and set the global password policy settings\n localpwp Manage local (user/subtree) password policies\n replication Configure replication for a suffix\n repl-agmt Manage replication agreements\n repl-winsync-agmt Manage Winsync Agreements\n repl-tasks Manage replication tasks\n sasl Query and manipulate SASL mappings\n security Query and manipulate security options\n schema Query and manipulate schema\n repl-conflict Manage replication conflicts\n\noptional arguments:\n -h, --help show this help message and exit\n -v, --verbose Display verbose operation tracing during command\n execution\n -D BINDDN, --binddn BINDDN\n The account to bind as for executing operations\n -w BINDPW, --bindpw BINDPW\n Password for binddn\n -W, --prompt Prompt for password for the bind DN\n -y PWDFILE, --pwdfile PWDFILE\n Specifies a file containing the password for the\n binddn\n -b BASEDN, --basedn BASEDN\n Basedn (root naming context) of the instance to manage\n -Z, --starttls Connect with StartTLS\n -j, --json Return result in JSON object\n' [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:396 Use dsconf replication changelog without -l option: no generated ldif files should be present in /var/lib/dirsrv/slapd-master1/ldif [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:399 Command used : ['/usr/sbin/dsconf', 'ldap://LOCALHOST:39001', '-D', 'cn=Directory Manager', '-w', 'password', 'replication', 'export-changelog', 'default', 
'-r', 'dc=example,dc=com'] [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:404 Wait for all dsconf export-changelog files to be generated [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:407 Check if dsconf export-changelog generated .ldif.done files are present - should not [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:413 All dsconf export-changelog generated .ldif files have been successfully removed from /var/lib/dirsrv/slapd-master1/ldif [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:416 Use dsconf replication changelog with -l option: generated ldif files should be kept in /var/lib/dirsrv/slapd-master1/ldif [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:419 Command used : ['/usr/sbin/dsconf', 'ldap://LOCALHOST:39001', '-D', 'cn=Directory Manager', '-w', 'password', 'replication', 'export-changelog', 'to-ldif', '-o', '/var/lib/dirsrv/slapd-master1/ldif/test.ldif', '-r', 'dc=example,dc=com', '-l'] [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:424 Wait for all dsconf export-changelog files to be generated [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:427 Check if dsconf export-changelog generated .ldif.done files are present - should be [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:431 Success : ldif file /var/lib/dirsrv/slapd-master1/ldif/c4e2bd04-230311eb-b7a1d2c7-e74079ff_cl.ldif.done is present | |||
Passed | suites/replication/changelog_test.py::test_verify_changelog | 0.15 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:454 LDAP operations add, modify, modrdn and delete [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:52 Adding user replusr [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:65 Modify RDN of user uid=replusr,ou=People,dc=example,dc=com [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:72 Deleting user: uid=cl5usr,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:79 Dump changelog using nss5task and check if ldap operations are logged [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:88 Remove ldif files, if present in: /var/lib/dirsrv/slapd-master1/ldif [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:99 No existing changelog ldif files present [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:101 Running nsds5task to dump changelog database to a file [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:104 Check if changelog ldif file exist in: /var/lib/dirsrv/slapd-master1/ldif [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:108 Changelog ldif file exist: /var/lib/dirsrv/slapd-master1/ldif/c4e2bd04-230311eb-b7a1d2c7-e74079ff_cl.ldif [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:118 Checking changelog ldif file for ldap operations [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:123 Checking if all required changetype operations are present [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:129 Valid ldap operations: {'delete', 'modify', 'add', 'modrdn'} [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:130 Ldap operations found: {'delete', 'modify', 'add', 'modrdn'} | |||
Passed | suites/replication/changelog_test.py::test_verify_changelog_online_backup | 7.17 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:481 Run db2bak script to take database backup [32mINFO [0m lib389:tasks.py:619 Backup task backup_11092020_222134 completed successfully [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:493 Database backup is created successfully [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:498 Run bak2db to restore directory server [32mINFO [0m lib389:tasks.py:673 Restore task restore_11092020_222136 completed successfully [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:505 LDAP operations add, modify, modrdn and delete [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:52 Adding user replusr [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:65 Modify RDN of user uid=replusr,ou=People,dc=example,dc=com [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:72 Deleting user: uid=cl5usr,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:79 Dump changelog using nss5task and check if ldap operations are logged [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:88 Remove ldif files, if present in: /var/lib/dirsrv/slapd-master1/ldif [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:97 Existing changelog ldif file: /var/lib/dirsrv/slapd-master1/ldif/c4e2bd04-230311eb-b7a1d2c7-e74079ff_cl.ldif removed [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:99 No existing changelog ldif files present [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:101 Running nsds5task to dump changelog database to a file [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:104 Check if changelog ldif file exist in: /var/lib/dirsrv/slapd-master1/ldif [32mINFO [0m 
tests.suites.replication.changelog_test:changelog_test.py:108 Changelog ldif file exist: /var/lib/dirsrv/slapd-master1/ldif/c4e2bd04-230311eb-b7a1d2c7-e74079ff_cl.ldif [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:118 Checking changelog ldif file for ldap operations [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:123 Checking if all required changetype operations are present [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:129 Valid ldap operations: {'delete', 'modify', 'add', 'modrdn'} [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:130 Ldap operations found: {'delete', 'modify', 'add', 'modrdn'} | |||
Passed | suites/replication/changelog_test.py::test_verify_changelog_offline_backup | 8.66 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:534 Run db2bak to take database backup [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:541 Run bak2db to restore directory server [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:554 Database backup is created successfully [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:559 LDAP operations add, modify, modrdn and delete [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:52 Adding user replusr [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:65 Modify RDN of user uid=replusr,ou=People,dc=example,dc=com [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:72 Deleting user: uid=cl5usr,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:79 Dump changelog using nss5task and check if ldap operations are logged [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:88 Remove ldif files, if present in: /var/lib/dirsrv/slapd-master1/ldif [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:97 Existing changelog ldif file: /var/lib/dirsrv/slapd-master1/ldif/c4e2bd04-230311eb-b7a1d2c7-e74079ff_cl.ldif removed [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:99 No existing changelog ldif files present [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:101 Running nsds5task to dump changelog database to a file [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:104 Check if changelog ldif file exist in: /var/lib/dirsrv/slapd-master1/ldif [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:108 Changelog ldif file exist: /var/lib/dirsrv/slapd-master1/ldif/c4e2bd04-230311eb-b7a1d2c7-e74079ff_cl.ldif [32mINFO [0m 
tests.suites.replication.changelog_test:changelog_test.py:118 Checking changelog ldif file for ldap operations [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:123 Checking if all required changetype operations are present [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:129 Valid ldap operations: {'delete', 'modify', 'add', 'modrdn'} [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:130 Ldap operations found: {'delete', 'modify', 'add', 'modrdn'} | |||
Passed | suites/replication/changelog_test.py::test_changelog_maxage | 0.36 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:144 Testing Ticket 47669 - Test duration syntax in the changelogs [32mINFO [0m lib389:changelog_test.py:147 Bind as cn=Directory Manager -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:581 1. Test nsslapd-changelogmaxage in cn=changelog5,cn=config [32mINFO [0m lib389:changelog_test.py:584 Bind as cn=Directory Manager [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:174 Test nsslapd-changelogmaxage: 12345 -- valid [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:174 Test nsslapd-changelogmaxage: 10s -- valid [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:174 Test nsslapd-changelogmaxage: 30M -- valid [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:174 Test nsslapd-changelogmaxage: 12h -- valid [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:174 Test nsslapd-changelogmaxage: 2D -- valid [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:174 Test nsslapd-changelogmaxage: 4w -- valid [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:181 Test nsslapd-changelogmaxage: -123 -- invalid [31m[1mERROR [0m tests.suites.replication.changelog_test:changelog_test.py:186 Expectedly failed to add nsslapd-changelogmaxage: -123 to cn=changelog,cn=userRoot,cn=ldbm database,cn=plugins,cn=config: error Server is unwilling to perform [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:181 Test nsslapd-changelogmaxage: xyz -- invalid [31m[1mERROR [0m tests.suites.replication.changelog_test:changelog_test.py:186 Expectedly failed to add nsslapd-changelogmaxage: xyz to cn=changelog,cn=userRoot,cn=ldbm database,cn=plugins,cn=config: error Server is unwilling to perform 
| |||
Passed | suites/replication/changelog_test.py::test_ticket47669_changelog_triminterval | 0.10 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:614 2. Test nsslapd-changelogtrim-interval in cn=changelog5,cn=config [32mINFO [0m lib389:changelog_test.py:617 Bind as cn=Directory Manager [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:174 Test nsslapd-changelogtrim-interval: 12345 -- valid [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:174 Test nsslapd-changelogtrim-interval: 10s -- valid [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:174 Test nsslapd-changelogtrim-interval: 30M -- valid [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:174 Test nsslapd-changelogtrim-interval: 12h -- valid [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:174 Test nsslapd-changelogtrim-interval: 2D -- valid [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:174 Test nsslapd-changelogtrim-interval: 4w -- valid [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:181 Test nsslapd-changelogtrim-interval: -123 -- invalid [31m[1mERROR [0m tests.suites.replication.changelog_test:changelog_test.py:186 Expectedly failed to add nsslapd-changelogtrim-interval: -123 to cn=changelog,cn=userRoot,cn=ldbm database,cn=plugins,cn=config: error Server is unwilling to perform [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:181 Test nsslapd-changelogtrim-interval: xyz -- invalid [31m[1mERROR [0m tests.suites.replication.changelog_test:changelog_test.py:186 Expectedly failed to add nsslapd-changelogtrim-interval: xyz to cn=changelog,cn=userRoot,cn=ldbm database,cn=plugins,cn=config: error Server is unwilling to perform | |||
Passed | suites/replication/changelog_test.py::test_retrochangelog_maxage | 0.18 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:683 4. Test nsslapd-changelogmaxage in cn=Retro Changelog Plugin,cn=plugins,cn=config [32mINFO [0m lib389:changelog_test.py:686 Bind as cn=Directory Manager [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:174 Test nsslapd-changelogmaxage: 12345 -- valid [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:174 Test nsslapd-changelogmaxage: 10s -- valid [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:174 Test nsslapd-changelogmaxage: 30M -- valid [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:174 Test nsslapd-changelogmaxage: 12h -- valid [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:174 Test nsslapd-changelogmaxage: 2D -- valid [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:174 Test nsslapd-changelogmaxage: 4w -- valid [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:181 Test nsslapd-changelogmaxage: -123 -- invalid [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:181 Test nsslapd-changelogmaxage: xyz -- invalid [32mINFO [0m lib389:changelog_test.py:698 ticket47669 was successfully verified. | |||
Passed | suites/replication/changelog_test.py::test_retrochangelog_trimming_crash | 24.88 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.replication.changelog_test:changelog_test.py:722 1. Test retroCL trimming crash in cn=Retro Changelog Plugin,cn=plugins,cn=config [32mINFO [0m lib389:changelog_test.py:726 ticket50736 start verification [32mINFO [0m lib389:changelog_test.py:742 ticket 50736 was successfully verified. | |||
Passed | suites/replication/changelog_trimming_test.py::test_max_age | 10.49 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.replication.changelog_trimming_test:changelog_trimming_test.py:90 Testing changelog trimming interval with max age... | |||
Passed | suites/replication/changelog_trimming_test.py::test_max_entries | 12.95 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.replication.changelog_trimming_test:changelog_trimming_test.py:137 Testing changelog triming interval with max entries... | |||
Passed | suites/replication/cleanallruv_max_tasks_test.py::test_max_tasks | 55.16 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master3 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39003, 'ldap-secureport': 63703, 'server-id': 'master3', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master4 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39004, 'ldap-secureport': 63704, 'server-id': 'master4', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 9a0c26f3-9b85-4685-8b4e-f270189e2c39 / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 906c171a-d593-4604-a243-8008b2185829 / got description=9a0c26f3-9b85-4685-8b4e-f270189e2c39) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master3 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 0c3a1f2c-68e6-440e-b870-390b5565bbdc / got description=906c171a-d593-4604-a243-8008b2185829) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 71cb06af-adfa-4491-a101-a75727b28701 / got description=0c3a1f2c-68e6-440e-b870-390b5565bbdc) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master4 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 349890eb-19fe-4fd2-8ea5-bf11e1969da5 / got description=71cb06af-adfa-4491-a101-a75727b28701) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 3a12c1d8-3930-423d-a1f4-3116e3477928 / got description=349890eb-19fe-4fd2-8ea5-bf11e1969da5) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... 
[32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master3 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master4 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master3 ... [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is was created [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master4 ... [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is was created [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master3 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master3 to master2 ... [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master3 to master4 ... [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is was created [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master4 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master4 to master2 ... [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master4 to master3 ... [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is was created | |||
Passed | suites/replication/cleanallruv_test.py::test_clean | 2.99 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master3 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39003, 'ldap-secureport': 63703, 'server-id': 'master3', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master4 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39004, 'ldap-secureport': 63704, 'server-id': 'master4', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 1e1eae06-4fe9-493b-8308-03de39209778 / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect b91fb110-4b9f-48ce-9108-c9e5b496caaa / got description=1e1eae06-4fe9-493b-8308-03de39209778) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master3 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 8a149e57-a164-47bd-abae-5574cb84f958 / got description=b91fb110-4b9f-48ce-9108-c9e5b496caaa) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 9a9c53d4-355f-4b9c-a5b3-664af2fb5999 / got description=8a149e57-a164-47bd-abae-5574cb84f958) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master4 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 55d9bbd1-cc1c-4f09-a508-08fbf72f8b54 / got description=9a9c53d4-355f-4b9c-a5b3-664af2fb5999) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 73ce46ac-aaeb-4d48-abfd-97c6a7e51112 / got description=55d9bbd1-cc1c-4f09-a508-08fbf72f8b54) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 73ce46ac-aaeb-4d48-abfd-97c6a7e51112 / got description=55d9bbd1-cc1c-4f09-a508-08fbf72f8b54) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 73ce46ac-aaeb-4d48-abfd-97c6a7e51112 / got 
description=55d9bbd1-cc1c-4f09-a508-08fbf72f8b54) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 73ce46ac-aaeb-4d48-abfd-97c6a7e51112 / got description=55d9bbd1-cc1c-4f09-a508-08fbf72f8b54) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 73ce46ac-aaeb-4d48-abfd-97c6a7e51112 / got description=55d9bbd1-cc1c-4f09-a508-08fbf72f8b54) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 73ce46ac-aaeb-4d48-abfd-97c6a7e51112 / got description=55d9bbd1-cc1c-4f09-a508-08fbf72f8b54) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master3 ... 
[32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master4 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master3 ... [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is was created [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master4 ... [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is was created [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master3 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master3 to master2 ... 
[32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master3 to master4 ... [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is was created [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master4 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master4 to master2 ... [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master4 to master3 ... 
[32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is was created [35mDEBUG [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:153 Wait a bit before the reset - it is required for the slow machines [35mDEBUG [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:155 -------------- BEGIN RESET of m4 ----------------- [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect e3a3e071-1583-4224-9b6d-31b02c51ed71 / got description=73ce46ac-aaeb-4d48-abfd-97c6a7e51112) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect ac39a63e-ad18-49f5-8c1d-d3764f5953d7 / got description=e3a3e071-1583-4224-9b6d-31b02c51ed71) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 3bca321c-44c3-4a15-a723-8b861c882cea / got description=ac39a63e-ad18-49f5-8c1d-d3764f5953d7) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to 
ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 286cdd5d-1778-45ca-bc5c-86b0e79b2598 / got description=3bca321c-44c3-4a15-a723-8b861c882cea) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect fc7ad48c-954f-4402-9e7f-48a2013aa02d / got description=286cdd5d-1778-45ca-bc5c-86b0e79b2598) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 5a1896e2-b66f-4988-b92f-b150682d8806 / got description=fc7ad48c-954f-4402-9e7f-48a2013aa02d) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 84d9803e-f520-4d7a-abe9-1bcf72ac1a04 / got description=5a1896e2-b66f-4988-b92f-b150682d8806) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: 
Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 22c9ce09-9d3e-4b73-8a39-64939bb15aab / got description=84d9803e-f520-4d7a-abe9-1bcf72ac1a04) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 22c9ce09-9d3e-4b73-8a39-64939bb15aab / got description=84d9803e-f520-4d7a-abe9-1bcf72ac1a04) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 22c9ce09-9d3e-4b73-8a39-64939bb15aab / got description=84d9803e-f520-4d7a-abe9-1bcf72ac1a04) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 22c9ce09-9d3e-4b73-8a39-64939bb15aab / got description=84d9803e-f520-4d7a-abe9-1bcf72ac1a04) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect a6dc6303-81ca-446c-a8aa-7fc8aca4d902 / got description=22c9ce09-9d3e-4b73-8a39-64939bb15aab) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from 
ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect cecb883a-36ee-4f9c-9cb4-1e89789d548a / got description=a6dc6303-81ca-446c-a8aa-7fc8aca4d902) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0f9aec2b-6402-43d3-8fbf-371dd987e6cd / got description=cecb883a-36ee-4f9c-9cb4-1e89789d548a) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 4f565a94-fd21-4db5-8517-bfbdca23b2d6 / got description=0f9aec2b-6402-43d3-8fbf-371dd987e6cd) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is working [35mDEBUG [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:181 -------------- FINISH RESET of m4 ----------------- -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:204 Running 
test_clean... [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:207 test_clean: disable master 4... [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:71 test_clean: remove all the agreements to master 4... [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:211 test_clean: run the cleanAllRUV task... [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:221 test_clean: check all the masters have been cleaned... [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:85 check_ruvs for replica dc=example,dc=com:1 (suffix:rid) [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:85 check_ruvs for replica dc=example,dc=com:2 (suffix:rid) [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:85 check_ruvs for replica dc=example,dc=com:3 (suffix:rid) [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:225 test_clean PASSED, restoring master 4... | |||
Passed | suites/replication/cleanallruv_test.py::test_clean_restart | 22.91 | |
-------------------------------Captured log setup------------------------------- [35mDEBUG [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:153 Wait a bit before the reset - it is required for the slow machines [35mDEBUG [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:155 -------------- BEGIN RESET of m4 ----------------- [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0695b9e1-221b-4f1f-9a11-f3428a3dbe63 / got description=58a1c9bc-32f0-46c1-ac3f-e8c1627f0cb9) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 99256ccd-c20f-45cf-855c-224c940fd9a7 / got description=0695b9e1-221b-4f1f-9a11-f3428a3dbe63) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect f6a53608-e18b-4d19-ba78-c7223fa57bb0 / got description=99256ccd-c20f-45cf-855c-224c940fd9a7) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from 
ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 0a9fe6da-9aa3-429d-84f6-16d9d492dde2 / got description=f6a53608-e18b-4d19-ba78-c7223fa57bb0) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect ffc1e8d3-22a9-4073-afdc-6058981c2698 / got description=0a9fe6da-9aa3-429d-84f6-16d9d492dde2) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect e80cefa1-98d2-4dec-a215-e49a1dc532bb / got description=ffc1e8d3-22a9-4073-afdc-6058981c2698) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect b75db2a9-677c-423b-ae8c-5b6439a7f4b5 / got description=e80cefa1-98d2-4dec-a215-e49a1dc532bb) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is 
working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 53f840b1-dfe6-4846-9ee1-bec606360d3b / got description=b75db2a9-677c-423b-ae8c-5b6439a7f4b5) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 64591463-0444-4f88-908c-23e544483bf5 / got description=53f840b1-dfe6-4846-9ee1-bec606360d3b) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is working [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c02e428c-d708-4dfa-ac50-57612dc03cce / got description=49de153c-90ac-4448-86d6-9c1ce1fd1b1e) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 
3a2081c0-d8a1-4048-af09-f0533751f2c8 / got description=c02e428c-d708-4dfa-ac50-57612dc03cce) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is working [35mDEBUG [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:181 -------------- FINISH RESET of m4 ----------------- -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:256 Running test_clean_restart... [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:259 test_clean: disable master 4... [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:71 test_clean: remove all the agreements to master 4... [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:267 test_clean: run the cleanAllRUV task... [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:292 test_clean_restart: check all the masters have been cleaned... [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:85 check_ruvs for replica dc=example,dc=com:1 (suffix:rid) [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:85 check_ruvs for replica dc=example,dc=com:2 (suffix:rid) [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:85 check_ruvs for replica dc=example,dc=com:3 (suffix:rid) [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:296 test_clean_restart PASSED, restoring master 4... | |||
Passed | suites/replication/cleanallruv_test.py::test_clean_force | 49.57 | |
-------------------------------Captured log setup------------------------------- [35mDEBUG [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:153 Wait a bit before the reset - it is required for the slow machines [35mDEBUG [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:155 -------------- BEGIN RESET of m4 ----------------- [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect d8ff2f4f-497e-4b32-8943-adfba16de0b0 / got description=b68e44b1-738c-4c7d-a4e5-2ad6f47f6458) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 58a24ef4-092d-4350-94b2-288b8916af08 / got description=d8ff2f4f-497e-4b32-8943-adfba16de0b0) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect c81b1a1c-b2d1-4f2b-8bbc-ec67dfa12443 / got description=58a24ef4-092d-4350-94b2-288b8916af08) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from 
ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 826f234d-6260-494e-aa68-f914f0ee567f / got description=c81b1a1c-b2d1-4f2b-8bbc-ec67dfa12443) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 17dc1356-61fe-4e04-ab10-52cb1400355d / got description=826f234d-6260-494e-aa68-f914f0ee567f) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 23d21306-b286-4e33-a026-271d21c68cb7 / got description=17dc1356-61fe-4e04-ab10-52cb1400355d) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 0da02e0e-2063-4e54-9332-fa92d7456db1 / got description=23d21306-b286-4e33-a026-271d21c68cb7) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is 
NOT working (expect 0da02e0e-2063-4e54-9332-fa92d7456db1 / got description=23d21306-b286-4e33-a026-271d21c68cb7) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 0da02e0e-2063-4e54-9332-fa92d7456db1 / got description=23d21306-b286-4e33-a026-271d21c68cb7) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 0da02e0e-2063-4e54-9332-fa92d7456db1 / got description=23d21306-b286-4e33-a026-271d21c68cb7) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect b5e0b2cb-0ea7-4bd6-b5fe-dd5be6d5fb0a / got description=0da02e0e-2063-4e54-9332-fa92d7456db1) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect f2de292d-0386-4935-9fac-071c765be103 / got description=b5e0b2cb-0ea7-4bd6-b5fe-dd5be6d5fb0a) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is working [32mINFO [0m 
lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect fe2862da-5a52-4ede-82f9-070ca6ca5203 / got description=f2de292d-0386-4935-9fac-071c765be103) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 9b2cac39-8b0c-467e-b118-b98585cb0a45 / got description=fe2862da-5a52-4ede-82f9-070ca6ca5203) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 6e776d3c-27ca-402c-b6e1-4876e3ee26f4 / got description=9b2cac39-8b0c-467e-b118-b98585cb0a45) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is working [35mDEBUG [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:181 -------------- FINISH RESET of m4 ----------------- -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:322 Running test_clean_force... 
[32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:71 test_clean_force: remove all the agreements to master 4... [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:340 test_clean: run the cleanAllRUV task... [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:350 test_clean_force: check all the masters have been cleaned... [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:85 check_ruvs for replica dc=example,dc=com:1 (suffix:rid) [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:85 check_ruvs for replica dc=example,dc=com:2 (suffix:rid) [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:85 check_ruvs for replica dc=example,dc=com:3 (suffix:rid) [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:354 test_clean_force PASSED, restoring master 4... | |||
Passed | suites/replication/cleanallruv_test.py::test_abort | 9.32 | |
-------------------------------Captured log setup------------------------------- [35mDEBUG [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:153 Wait a bit before the reset - it is required for the slow machines [35mDEBUG [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:155 -------------- BEGIN RESET of m4 ----------------- [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect fa1bfcbe-6f20-4c93-9266-9506594c6a4a / got description=c609199d-95f4-47f7-b769-6ad140c9c0c9) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect b0d1c857-0ed9-46c6-a1b6-25b0f19831f1 / got description=fa1bfcbe-6f20-4c93-9266-9506594c6a4a) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 1e29c9f5-0f1a-4bea-8a55-4fcb7d44daa4 / got description=b0d1c857-0ed9-46c6-a1b6-25b0f19831f1) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from 
ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 7ba16f25-e6f9-4b10-a7f9-d06b662a9d85 / got description=1e29c9f5-0f1a-4bea-8a55-4fcb7d44daa4) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 1566af10-42ef-4b85-a970-e8b7d5e1290b / got description=7ba16f25-e6f9-4b10-a7f9-d06b662a9d85) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 45a53861-6bdc-4162-a081-f5dddb3caebc / got description=1566af10-42ef-4b85-a970-e8b7d5e1290b) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect ff55434f-6808-4208-9a65-1eaaab707a5e / got description=45a53861-6bdc-4162-a081-f5dddb3caebc) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is 
working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 84894c70-538e-47b9-b015-0f1da809ffe5 / got description=ff55434f-6808-4208-9a65-1eaaab707a5e) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 0d9e1864-465d-4752-bcc1-ce64543acb17 / got description=84894c70-538e-47b9-b015-0f1da809ffe5) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 403a61b3-a7ae-4cff-b0fa-deab9bad31c6 / got description=0d9e1864-465d-4752-bcc1-ce64543acb17) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 4e1ed91e-5698-41ba-97d6-3f632b48ee3c / got description=403a61b3-a7ae-4cff-b0fa-deab9bad31c6) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from 
ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 17fd742a-34ee-4f36-bce2-9966cbf5b568 / got description=4e1ed91e-5698-41ba-97d6-3f632b48ee3c) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is working [35mDEBUG [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:181 -------------- FINISH RESET of m4 ----------------- -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:376 Running test_abort... [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:71 test_abort: remove all the agreements to master 4... [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:381 test_abort: stop master 2 to freeze the cleanAllRUV task... [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:385 test_abort: add the cleanAllRUV task... [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:400 test_abort: check master 1 no longer has a cleanAllRUV task... [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:406 test_abort: start master 2 to begin the restore process... [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:409 test_abort PASSED, restoring master 4... | |||
Passed | suites/replication/cleanallruv_test.py::test_abort_restart | 29.69 | |
-------------------------------Captured log setup------------------------------- [35mDEBUG [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:153 Wait a bit before the reset - it is required for the slow machines [35mDEBUG [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:155 -------------- BEGIN RESET of m4 ----------------- [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect b4a42c46-8f7a-4208-af41-4f8d61fc6397 / got description=570f96d7-f122-46a0-b4cb-a89f2a2eacb4) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect d2887dfd-ab4c-449f-ba70-efe898eeb719 / got description=b4a42c46-8f7a-4208-af41-4f8d61fc6397) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 39891f5b-a6d5-4fa0-bee5-c7459e450937 / got description=d2887dfd-ab4c-449f-ba70-efe898eeb719) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from 
ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 712ed485-a9fe-49d1-90ec-f1ba0f279f06 / got description=39891f5b-a6d5-4fa0-bee5-c7459e450937) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 927989c7-deb4-4704-8e7e-6ddf9bdad3a3 / got description=712ed485-a9fe-49d1-90ec-f1ba0f279f06) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 09dfe741-76bf-4562-b35f-b35125e4dc9d / got description=927989c7-deb4-4704-8e7e-6ddf9bdad3a3) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect c7ff0a88-ad04-47cf-8a76-7ad6cfcf55ec / got description=09dfe741-76bf-4562-b35f-b35125e4dc9d) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is 
working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect ffe2a1ea-8f95-421a-97bd-be6400d1d977 / got description=c7ff0a88-ad04-47cf-8a76-7ad6cfcf55ec) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect a59b0c86-b3f8-4a95-82fc-8f98b71ca12a / got description=ffe2a1ea-8f95-421a-97bd-be6400d1d977) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect b04e4ee1-c074-456c-905f-abaa7dda2e7d / got description=a59b0c86-b3f8-4a95-82fc-8f98b71ca12a) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect b2fd9c3a-0006-466d-8482-5893d9f259ce / got description=b04e4ee1-c074-456c-905f-abaa7dda2e7d) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from 
ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect d50c6616-c5a5-46d2-b1c7-d3adec1634be / got description=b2fd9c3a-0006-466d-8482-5893d9f259ce) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is working [35mDEBUG [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:181 -------------- FINISH RESET of m4 ----------------- -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:441 Running test_abort_restart... [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:71 test_abort: remove all the agreements to master 4... [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:446 test_abort_restart: stop master 3 to freeze the cleanAllRUV task... [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:450 test_abort_restart: add the cleanAllRUV task... [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:465 test_abort_abort: check master 1 no longer has a cleanAllRUV task... [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:487 test_abort_restart PASSED, restoring master 4... | |||
Passed | suites/replication/cleanallruv_test.py::test_abort_certify | 35.60 | |
-------------------------------Captured log setup------------------------------- [35mDEBUG [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:153 Wait a bit before the reset - it is required for the slow machines [35mDEBUG [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:155 -------------- BEGIN RESET of m4 ----------------- [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 68167fcc-0b64-4138-9852-70f029723929 / got description=d0c61d56-689c-4963-bf27-76792c352213) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect d635502c-e900-44b7-85d8-4b312baf0a96 / got description=68167fcc-0b64-4138-9852-70f029723929) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 72bb7f1d-3c60-4bfe-ab60-cf303e0a1e7d / got description=d635502c-e900-44b7-85d8-4b312baf0a96) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from 
ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 60a46dcf-1bcd-478d-8690-b65de70cb33b / got description=72bb7f1d-3c60-4bfe-ab60-cf303e0a1e7d) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 298a9f81-3b3d-4f15-830a-d15ea9b0de6a / got description=60a46dcf-1bcd-478d-8690-b65de70cb33b) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 627781ad-5efe-48b9-b9c3-1367ebc30dba / got description=298a9f81-3b3d-4f15-830a-d15ea9b0de6a) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect b18349cd-bb0d-4285-b341-e98328d8b63e / got description=627781ad-5efe-48b9-b9c3-1367ebc30dba) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is 
working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 3d2c9640-9849-4ada-876e-75e7b811b831 / got description=b18349cd-bb0d-4285-b341-e98328d8b63e) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect cb156c05-8f0e-46d1-8e6e-dc5afd39b0fc / got description=3d2c9640-9849-4ada-876e-75e7b811b831) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 8736bb7b-6de6-4ae1-b1f5-958a17a42e5d / got description=cb156c05-8f0e-46d1-8e6e-dc5afd39b0fc) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect e3d06eb3-1a60-475d-aed3-f6ae905b06d5 / got description=8736bb7b-6de6-4ae1-b1f5-958a17a42e5d) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from 
ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 8be553bf-9a1e-456a-a2b0-5551f6148e3b / got description=e3d06eb3-1a60-475d-aed3-f6ae905b06d5) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is working [35mDEBUG [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:181 -------------- FINISH RESET of m4 ----------------- -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:509 Running test_abort_certify... [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:71 test_abort_certify: remove all the agreements to master 4... [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:515 test_abort_certify: stop master 2 to freeze the cleanAllRUV task... [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:519 test_abort_certify: add the cleanAllRUV task... [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:531 test_abort_certify: abort the cleanAllRUV task... [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:535 test_abort_certify... [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:542 test_abort_certify: start master 2 to allow the abort task to finish... [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:551 test_abort_certify: check master 1 no longer has a cleanAllRUV task... 
[32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:556 test_abort_certify PASSED, restoring master 4... | |||
Passed | suites/replication/cleanallruv_test.py::test_stress_clean | 44.29 | |
-------------------------------Captured log setup------------------------------- [35mDEBUG [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:153 Wait a bit before the reset - it is required for the slow machines [35mDEBUG [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:155 -------------- BEGIN RESET of m4 ----------------- [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect f69975af-0a64-4e68-ac33-014069fb8137 / got description=251499dd-ec2d-4090-af59-b1e59e14ff50) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 1788def6-63b2-4ca4-9d4c-636e0f42fbd0 / got description=f69975af-0a64-4e68-ac33-014069fb8137) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect b9be81e2-7cec-4d98-8a7a-980c66a6d692 / got description=1788def6-63b2-4ca4-9d4c-636e0f42fbd0) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from 
ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 1e9ee873-50c1-4826-be76-fe9ec5d7b2f6 / got description=b9be81e2-7cec-4d98-8a7a-980c66a6d692) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 5cc9bddd-e356-418e-8646-5b68f8d98a1a / got description=1e9ee873-50c1-4826-be76-fe9ec5d7b2f6) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect ed68c517-c6de-4085-a218-74b01be09148 / got description=5cc9bddd-e356-418e-8646-5b68f8d98a1a) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 498403a0-c46c-4334-97da-d00ae032122d / got description=ed68c517-c6de-4085-a218-74b01be09148) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is 
working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 9d9d640c-aff2-4fc7-9ab7-44362bb24aad / got description=498403a0-c46c-4334-97da-d00ae032122d) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect f49b1ea7-5038-446a-9e07-896ec5e98335 / got description=9d9d640c-aff2-4fc7-9ab7-44362bb24aad) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is working [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 4fe8bdc3-e6ef-4342-9762-d679a085ede1 / got description=9a85b704-59c8-490b-8938-c3af7d3c879b) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 
84b3f65f-6d10-430e-82b6-ebc169968330 / got description=4fe8bdc3-e6ef-4342-9762-d679a085ede1) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is working [35mDEBUG [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:181 -------------- FINISH RESET of m4 ----------------- -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:580 Running test_stress_clean... [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:581 test_stress_clean: put all the masters under load... [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:598 test_stress_clean: allow some time for replication to get flowing... [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:604 test_stress_clean: allow some time for master 4 to push changes out (60 seconds)... [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:71 test_stress_clean: remove all the agreements to master 4... [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:620 test_stress_clean: wait for all the updates to finish... [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:627 test_stress_clean: check if all the replicas have been cleaned... 
[32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:85 check_ruvs for replica dc=example,dc=com:1 (suffix:rid) [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:85 check_ruvs for replica dc=example,dc=com:2 (suffix:rid) [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:85 check_ruvs for replica dc=example,dc=com:3 (suffix:rid) [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:631 test_stress_clean: PASSED, restoring master 4... [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:634 Sleep for 120 seconds to allow replication to complete... [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect a55edde9-f83d-4bc1-bfc9-ad8b54d11b99 / got description=84b3f65f-6d10-430e-82b6-ebc169968330) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect b7d2791e-933c-44f5-8a8d-5a1b22e9653b / got description=a55edde9-f83d-4bc1-bfc9-ad8b54d11b99) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect d7bbd052-1efe-467e-b95a-caff71dd542c / got 
description=b7d2791e-933c-44f5-8a8d-5a1b22e9653b) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect d7bbd052-1efe-467e-b95a-caff71dd542c / got description=b7d2791e-933c-44f5-8a8d-5a1b22e9653b) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 8244b266-7a43-42f0-9f99-455c87a0e3f3 / got description=d7bbd052-1efe-467e-b95a-caff71dd542c) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect a6f8c450-0b73-419d-b198-40ab0d4beacc / got description=8244b266-7a43-42f0-9f99-455c87a0e3f3) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working | |||
Passed | suites/replication/cleanallruv_test.py::test_multiple_tasks_with_force | 56.74 | |
-------------------------------Captured log setup------------------------------- [35mDEBUG [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:153 Wait a bit before the reset - it is required for the slow machines [35mDEBUG [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:155 -------------- BEGIN RESET of m4 ----------------- [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect a24b7c4e-b2e5-4605-8fed-db601302c6dd / got description=7a31554e-dbaf-466a-b7fe-6cf2b1917b47) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 0cb6663a-6e38-4091-a3b1-3fe8997b9296 / got description=a24b7c4e-b2e5-4605-8fed-db601302c6dd) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect dd14b793-7cb4-4927-b935-511693f97fed / got description=0cb6663a-6e38-4091-a3b1-3fe8997b9296) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from 
ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect e26b5409-2b20-4276-ae1d-ef5348b8c479 / got description=dd14b793-7cb4-4927-b935-511693f97fed) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 33804b07-391b-489a-9c8e-3e9e56c16277 / got description=e26b5409-2b20-4276-ae1d-ef5348b8c479) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect d34d723b-394a-4c3b-b912-669638a98c26 / got description=33804b07-391b-489a-9c8e-3e9e56c16277) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect bca51c4a-bcf5-4511-8b8a-d70b7118d5d0 / got description=d34d723b-394a-4c3b-b912-669638a98c26) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is 
working [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect f0b95bea-54b1-46be-84dc-ef3da1c5aaeb / got description=f1843893-a898-435c-bcdf-77ec3865476b) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 731cbe90-039f-4dd5-b9af-c18274cea853 / got description=f0b95bea-54b1-46be-84dc-ef3da1c5aaeb) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 87d4f5da-fcd5-4c00-991b-cfe2c04c93ee / got description=731cbe90-039f-4dd5-b9af-c18274cea853) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 
a0169705-e467-430b-9379-9d2412eb9cae / got description=87d4f5da-fcd5-4c00-991b-cfe2c04c93ee) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is working [35mDEBUG [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:181 -------------- FINISH RESET of m4 ----------------- -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:671 Running test_multiple_tasks_with_force... [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:71 test_multiple_tasks_with_force: remove all the agreements to master 4... [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:690 test_multiple_tasks_with_force: run the cleanAllRUV task with "force" on... [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:699 test_multiple_tasks_with_force: run the cleanAllRUV task with "force" off... [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:718 test_multiple_tasks_with_force: check all the masters have been cleaned... [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:85 check_ruvs for replica dc=example,dc=com:1 (suffix:rid) [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:85 check_ruvs for replica dc=example,dc=com:2 (suffix:rid) [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:85 check_ruvs for replica dc=example,dc=com:3 (suffix:rid) [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:722 test_abort: check master 1 no longer has a cleanAllRUV task... | |||
Passed | suites/replication/cleanallruv_test.py::test_clean_shutdown_crash | 44.31 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 5c9cfa16-fc44-4d39-97ed-25c03b39766a / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 
Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 3f3de97f-3d0a-4fac-9509-906e36fecd5c / got description=5c9cfa16-fc44-4d39-97ed-25c03b39766a) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:771 Enabling TLS [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:774 Creating replication dns [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:782 Changing auth type [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:803 Stopping master2 [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:806 Run the cleanAllRUV task [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:817 Check if master1 crashed [32mINFO [0m tests.suites.replication.cleanallruv_test:cleanallruv_test.py:820 Repeat | |||
Passed | suites/replication/conflict_resolve_test.py::TestTwoMasters::test_add_modrdn | 19.44 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect ec075107-f329-4d27-9d57-1901ab88ddde / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 
Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 860eee5b-6fbd-40dc-885f-05cb48888940 / got description=ec075107-f329-4d27-9d57-1901ab88ddde) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect cc9f8476-73d9-4730-9e56-7e7a097e1c27 / got description=860eee5b-6fbd-40dc-885f-05cb48888940) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect cc9f8476-73d9-4730-9e56-7e7a097e1c27 / got description=860eee5b-6fbd-40dc-885f-05cb48888940) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect cc9f8476-73d9-4730-9e56-7e7a097e1c27 / got description=860eee5b-6fbd-40dc-885f-05cb48888940) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect cc9f8476-73d9-4730-9e56-7e7a097e1c27 / got description=860eee5b-6fbd-40dc-885f-05cb48888940) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:210 Test create - modrdn [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to 
ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect ba4cb612-1cc1-430d-8981-669ad896bd07 / got description=cc9f8476-73d9-4730-9e56-7e7a097e1c27) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect ba4cb612-1cc1-430d-8981-669ad896bd07 / got description=cc9f8476-73d9-4730-9e56-7e7a097e1c27) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect ba4cb612-1cc1-430d-8981-669ad896bd07 / got description=cc9f8476-73d9-4730-9e56-7e7a097e1c27) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 929d149a-1f68-4344-8700-5138d6662434 / got description=ba4cb612-1cc1-430d-8981-669ad896bd07) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working | |||
Passed | suites/replication/conflict_resolve_test.py::TestTwoMasters::test_conflict_attribute_multi_valued | 11.98 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:833 Check foo1 is on M1 [32mINFO [0m tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:833 Check foo2 is on M1 [32mINFO [0m tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:839 Check foo1 is on M1 [32mINFO [0m tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:839 Check foo2 is on M1 [32mINFO [0m tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:844 Check M1.uid foo1 is also on M2 [32mINFO [0m tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:844 Check M1.uid foo2 is also on M2 [32mINFO [0m tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:848 Check M2.uid foo1 is also on M1 [32mINFO [0m tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:848 Check M2.uid foo2 is also on M1 | |||
Passed | suites/replication/conflict_resolve_test.py::TestTwoMasters::test_conflict_attribute_single_valued | 10.26 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:921 Check foo1 is on M1 [32mINFO [0m tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:927 Check foo1 is on M2 [32mINFO [0m tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:932 Check M1.uid foo1 is also on M2 [32mINFO [0m tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:936 Check M2.uid foo1 is also on M1 | |||
Passed | suites/replication/encryption_cl5_test.py::test_algorithm_unhashed | 62.72 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 55123624-c0e3-449d-9504-2c2b5908547d / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 
Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect d5270ec1-61b0-408e-a43a-0142ebabb52f / got description=55123624-c0e3-449d-9504-2c2b5908547d) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 is NOT working (expect 6c684051-c41f-471d-9d45-0d2d9fe04a3f / got description=d5270ec1-61b0-408e-a43a-0142ebabb52f) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 is working -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.replication.encryption_cl5_test:encryption_cl5_test.py:47 Configuring changelog encryption:master1 for: AES [32mINFO [0m tests.suites.replication.encryption_cl5_test:encryption_cl5_test.py:64 Running dbscan -f to check unhashed#user#password attr [32mINFO [0m lib389:__init__.py:3014 Running script: ['/usr/bin/dbscan', '-f', '/var/lib/dirsrv/slapd-master1/db/userRoot/replication_changelog.db'] [32mINFO [0m tests.suites.replication.encryption_cl5_test:encryption_cl5_test.py:64 Running dbscan -f to check unhashed#user#password attr [32mINFO [0m lib389:__init__.py:3014 Running script: ['/usr/bin/dbscan', '-f', '/var/lib/dirsrv/slapd-master2/db/userRoot/replication_changelog.db'] [32mINFO [0m tests.suites.replication.encryption_cl5_test:encryption_cl5_test.py:64 Running dbscan -f to check unhashed#user#password attr [32mINFO [0m lib389:__init__.py:3014 Running script: ['/usr/bin/dbscan', '-f', '/var/lib/dirsrv/slapd-master1/db/userRoot/replication_changelog.db'] [32mINFO [0m tests.suites.replication.encryption_cl5_test:encryption_cl5_test.py:64 Running dbscan -f to check unhashed#user#password attr [32mINFO [0m lib389:__init__.py:3014 Running 
script: ['/usr/bin/dbscan', '-f', '/var/lib/dirsrv/slapd-master2/db/userRoot/replication_changelog.db'] [32mINFO [0m tests.suites.replication.encryption_cl5_test:encryption_cl5_test.py:64 Running dbscan -f to check unhashed#user#password attr [32mINFO [0m lib389:__init__.py:3014 Running script: ['/usr/bin/dbscan', '-f', '/var/lib/dirsrv/slapd-master1/db/userRoot/replication_changelog.db'] [32mINFO [0m tests.suites.replication.encryption_cl5_test:encryption_cl5_test.py:64 Running dbscan -f to check unhashed#user#password attr [32mINFO [0m lib389:__init__.py:3014 Running script: ['/usr/bin/dbscan', '-f', '/var/lib/dirsrv/slapd-master2/db/userRoot/replication_changelog.db'] [32mINFO [0m tests.suites.replication.encryption_cl5_test:encryption_cl5_test.py:64 Running dbscan -f to check unhashed#user#password attr [32mINFO [0m lib389:__init__.py:3014 Running script: ['/usr/bin/dbscan', '-f', '/var/lib/dirsrv/slapd-master1/db/userRoot/replication_changelog.db'] [32mINFO [0m tests.suites.replication.encryption_cl5_test:encryption_cl5_test.py:64 Running dbscan -f to check unhashed#user#password attr [32mINFO [0m lib389:__init__.py:3014 Running script: ['/usr/bin/dbscan', '-f', '/var/lib/dirsrv/slapd-master2/db/userRoot/replication_changelog.db'] | |||
Passed | suites/replication/multiple_changelogs_test.py::test_multiple_changelogs | 13.14 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for consumer1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39201, 'ldap-secureport': 63901, 'server-id': 'consumer1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:169 Joining consumer consumer1 from master1 ... [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect f4e55808-daba-4e24-91dd-553ce9f5bf0e / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is working [32mINFO [0m lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 [32mINFO [0m 
lib389.topologies:topologies.py:174 Ensuring consumer consumer1 from master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 already exists -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect c6b93146-dce5-4543-9e66-dcf058312c8d / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is working [32mINFO [0m lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 | |||
Passed | suites/replication/multiple_changelogs_test.py::test_multiple_changelogs_export_import | 10.88 | |
No log output captured. | |||
Passed | suites/replication/regression_test.py::test_special_symbol_replica_agreement | 1.19 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38902, 'ldap-secureport': 63602, 'server-id': 'standalone2', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:backend.py:80 List backend with suffix=dc=example,dc=com [32mINFO [0m lib389:__init__.py:1713 Found entry dn: cn=replication manager,cn=config cn: bind dn pseudo user cn: replication manager objectClass: top objectClass: person sn: bind dn pseudo user userPassword: {PBKDF2_SHA256}AAAIAKE6SQXuzkaREttfIEB/5yDjvK5zgrqq9xKOfs7ZCuWppsw7L9Wqvu2WqZOySH1ZoN2GRrw+T8We8kWTAaExUMMYaw6CCQKNg3sfU15MRa9kWMxMP4bpgYFkclH4VnTPVsu7egfeeD+Nm0334HQnnbcs73hSDp31fTl+/FGtp4lkcdFFSrtavLuDcrkwGwaj3ZxYjaNzuPM5EakDDfEyE6u1U6G5+E0k93IyILfEBVZgEkGQfG5MJTJHZ49xehuVSo9EqPQ6n44os1FIe9T/GilyVBjcJr1DwQJhRJD/pLKBUUuTHDIZ3S7ue5E9t0YDhGTkptEsIxlG51LObyOD4Bi4hzxWe3NrA7esic5RNAlus8L6fLQtQfukiIuI9PmChoYpSqfMtkWH7Ed5X5HZgE5qdO8ps8WN2t2j8ZfAE6cP [32mINFO [0m lib389:agreement.py:1169 Starting total init cn=-3meTo_ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:38902,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config | |||
Passed | suites/replication/regression_test.py::test_double_delete | 2.38 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 1c878725-284d-471e-801e-5a0aadaa7726 / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 
Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 30a06f37-832c-4339-bd58-5383c22df547 / got description=1c878725-284d-471e-801e-5a0aadaa7726) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists [32mINFO [0m tests.suites.replication.regression_test:regression_test.py:165 Adding a test entry user -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.replication.regression_test:regression_test.py:292 Deleting entry uid=testuser,ou=People,dc=example,dc=com from master1 [32mINFO [0m tests.suites.replication.regression_test:regression_test.py:295 Deleting entry uid=testuser,ou=People,dc=example,dc=com from master2 [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect a2f5d26e-0eeb-4fbc-9b0a-fee03dde832d / got description=30a06f37-832c-4339-bd58-5383c22df547) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 8097a6bb-fea1-4112-8e8a-5e22c6fdbae0 / got description=a2f5d26e-0eeb-4fbc-9b0a-fee03dde832d) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working | |||
Passed | suites/replication/regression_test.py::test_repl_modrdn | 6.63 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.replication.regression_test:regression_test.py:338 Add test entries - Add 3 OUs and 2 same users under 2 different OUs [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c804f228-75bb-4d84-8af8-c5c57a251b55 / got description=8097a6bb-fea1-4112-8e8a-5e22c6fdbae0) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect dee77ff3-c417-4922-8efb-fc2a4db78a60 / got description=c804f228-75bb-4d84-8af8-c5c57a251b55) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m tests.suites.replication.regression_test:regression_test.py:362 Stop Replication [32mINFO [0m tests.suites.replication.regression_test:regression_test.py:365 Apply modrdn to M1 - move test user from OU A -> C [32mINFO [0m tests.suites.replication.regression_test:regression_test.py:368 Apply modrdn on M2 - move test user from OU B -> C [32mINFO [0m tests.suites.replication.regression_test:regression_test.py:371 Start Replication [32mINFO [0m tests.suites.replication.regression_test:regression_test.py:374 Wait for sometime for repl to resume [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to 
ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 7d520388-cdc5-465d-bf99-764eaba59aec / got description=dee77ff3-c417-4922-8efb-fc2a4db78a60) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect d6314950-447d-4236-b5e3-64812d35d644 / got description=7d520388-cdc5-465d-bf99-764eaba59aec) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m tests.suites.replication.regression_test:regression_test.py:378 Check that there should be only one test entry under ou=C on both masters [32mINFO [0m tests.suites.replication.regression_test:regression_test.py:385 Check that the replication is working fine both ways, M1 <-> M2 [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 9f5baa9b-80c5-43d2-ae17-de836f667280 / got description=d6314950-447d-4236-b5e3-64812d35d644) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 80c24b42-7b28-40cb-b6af-b7122b7a74f5 / got 
description=9f5baa9b-80c5-43d2-ae17-de836f667280) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working | |||
Passed | suites/replication/regression_test.py::test_password_repl_error | 13.98 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m tests.suites.replication.regression_test:regression_test.py:165 Adding a test entry user -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.replication.regression_test:regression_test.py:409 Clean the error log [32mINFO [0m tests.suites.replication.regression_test:regression_test.py:412 Set replication loglevel [32mINFO [0m tests.suites.replication.regression_test:regression_test.py:415 Modifying entry uid=testuser,ou=People,dc=example,dc=com - change userpassword on master 1 [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 7b1e19a6-5782-4bac-b343-24dc89e95f78 / got description=80c24b42-7b28-40cb-b6af-b7122b7a74f5) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m tests.suites.replication.regression_test:regression_test.py:422 Restart the servers to flush the logs [32mINFO [0m tests.suites.replication.regression_test:regression_test.py:427 Check that password works on master 2 [32mINFO [0m tests.suites.replication.regression_test:regression_test.py:431 Check the error log for the error with uid=testuser,ou=People,dc=example,dc=com [32mINFO [0m tests.suites.replication.regression_test:regression_test.py:434 Set the default loglevel | |||
Passed | suites/replication/regression_test.py::test_invalid_agmt | 2.56 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 453b0ee7-ebf0-4147-a45c-e79dba62fe30 / got description=7b1e19a6-5782-4bac-b343-24dc89e95f78) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 29a8d5f2-c304-4b05-bd3e-360e1271c13a / got description=453b0ee7-ebf0-4147-a45c-e79dba62fe30) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working | |||
Passed | suites/replication/regression_test.py::test_fetch_bindDnGroup | 20.47 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 46f4f1ec-7342-40c7-992d-7efe36e79315 / got description=29a8d5f2-c304-4b05-bd3e-360e1271c13a) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working | |||
Passed | suites/replication/regression_test.py::test_plugin_bind_dn_tracking_and_replication | 1.17 | |
No log output captured. | |||
Passed | suites/replication/regression_test.py::test_cleanallruv_repl | 119.31 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master3 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39003, 'ldap-secureport': 63703, 'server-id': 'master3', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 63a037c8-2b4a-4922-ae95-ab1576740cf5 / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 96459649-bb0c-4561-9da4-a87180dbfd32 / got description=63a037c8-2b4a-4922-ae95-ab1576740cf5) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master3 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 2527fa44-6bd7-42c6-85e8-8c870d538b8c / got description=96459649-bb0c-4561-9da4-a87180dbfd32) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect eb1b6e7f-c6fe-4393-bc5f-5d41019f2e05 / got description=2527fa44-6bd7-42c6-85e8-8c870d538b8c) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... 
[32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master3 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master3 ... [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is was created [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master3 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master3 to master2 ... 
[32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.replication.regression_test:regression_test.py:704 Change the error log levels for all masters [32mINFO [0m tests.suites.replication.regression_test:regression_test.py:708 Get the replication agreements for all 3 masters [32mINFO [0m tests.suites.replication.regression_test:regression_test.py:713 Modify nsslapd-changelogmaxage=30 and nsslapd-changelogtrim-interval=5 for M1 and M2 [32mINFO [0m tests.suites.replication.regression_test:regression_test.py:736 Add test users to 3 masters [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:tasks.py:1400 cleanAllRUV task (task-11092020_225851) completed successfully [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config | |||
Passed | suites/replication/regression_test.py::test_online_reinit_may_hang | 17.98 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 084d8e9a-1ccd-4798-9e65-9235d7b2446e / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 
Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 9060ade8-d05d-4b9f-89e8-f699fc851813 / got description=084d8e9a-1ccd-4798-9e65-9235d7b2446e) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists ------------------------------Captured stderr call------------------------------ ldiffile: /var/lib/dirsrv/slapd-master1/ldif/master1.ldif -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 6f4a9057-2da5-4768-bb64-5dafa032a657 / got description=9060ade8-d05d-4b9f-89e8-f699fc851813) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working | |||
Passed | suites/replication/regression_test.py::test_moving_entry_make_online_init_fail | 2.10 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 251bb2a9-164a-43e2-aeb7-ceccbbc8b407 / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 
Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect d5d05754-bfb7-4a68-a02d-601b57beb6b0 / got description=251bb2a9-164a-43e2-aeb7-ceccbbc8b407) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.replication.regression_test:regression_test.py:886 Generating DIT_0 [32mINFO [0m tests.suites.replication.regression_test:regression_test.py:889 Created entry: ou=OU0, dc=example, dc=com [32mINFO [0m tests.suites.replication.regression_test:regression_test.py:894 Created entry: ou=OU0, ou=OU0, dc=example, dc=com [32mINFO [0m tests.suites.replication.regression_test:regression_test.py:215 Create password policy for subtree ou=OU0,dc=example,dc=com [32mINFO [0m tests.suites.replication.regression_test:regression_test.py:903 Turning tuser0 into a tombstone entry [32mINFO [0m tests.suites.replication.regression_test:regression_test.py:903 Turning tuser2 into a tombstone entry [32mINFO [0m tests.suites.replication.regression_test:regression_test.py:903 Turning tuser4 into a tombstone entry [32mINFO [0m tests.suites.replication.regression_test:regression_test.py:903 Turning tuser6 into a tombstone entry [32mINFO [0m tests.suites.replication.regression_test:regression_test.py:903 Turning tuser8 into a tombstone entry [32mINFO [0m tests.suites.replication.regression_test:regression_test.py:906 dc=example,dc=com => ou=OU0,dc=example,dc=com => ou=OU0,ou=OU0,dc=example,dc=com => 10 USERS [32mINFO [0m tests.suites.replication.regression_test:regression_test.py:908 Generating DIT_1 [32mINFO [0m tests.suites.replication.regression_test:regression_test.py:911 Created entry: ou=OU1,dc=example,dc=com [32mINFO [0m tests.suites.replication.regression_test:regression_test.py:915 Created entry: ou=OU1, ou=OU1, dc=example, dc=com [32mINFO [0m tests.suites.replication.regression_test:regression_test.py:215 Create password policy for subtree ou=OU1,dc=example,dc=com [32mINFO 
[0m tests.suites.replication.regression_test:regression_test.py:919 Moving ou=OU0,ou=OU0,dc=example,dc=com to DIT_1 [32mINFO [0m tests.suites.replication.regression_test:regression_test.py:922 Moving ou=OU0,dc=example,dc=com to DIT_1 [32mINFO [0m tests.suites.replication.regression_test:regression_test.py:929 Moving USERS to ou=OU0,ou=OU0,ou=OU1,dc=example,dc=com [32mINFO [0m tests.suites.replication.regression_test:regression_test.py:934 dc=example,dc=com => ou=OU1,dc=example,dc=com => ou=OU0,ou=OU1,dc=example,dc=com => ou=OU0,ou=OU0,ou=OU1,dc=example,dc=com => 10 USERS [32mINFO [0m tests.suites.replication.regression_test:regression_test.py:936 Run Initialization. [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 8a9acba6-e793-4b09-889f-23b96111b4f5 / got description=d5d05754-bfb7-4a68-a02d-601b57beb6b0) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m tests.suites.replication.regression_test:regression_test.py:945 m1entry count - 47 [32mINFO [0m tests.suites.replication.regression_test:regression_test.py:946 m2entry count - 47 | |||
Passed | suites/replication/repl_agmt_bootstrap_test.py::test_repl_agmt_bootstrap_credentials | 15.93 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect ae6eb72f-d943-4c27-bbba-7b30766d61e8 / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 
Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect eee84857-0512-4cce-9a42-04407e51235f / got description=ae6eb72f-d943-4c27-bbba-7b30766d61e8) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists | |||
Passed | suites/replication/replica_config_test.py::test_replica_num_add[nsDS5ReplicaType--1-4-9999999999999999999999999999999999999999999999999999999999999999999-invalid-1] | 0.42 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/replication/replica_config_test.py::test_replica_num_add[nsDS5Flags--1-2-9999999999999999999999999999999999999999999999999999999999999999999-invalid-1] | 0.18 | |
No log output captured. | |||
Passed | suites/replication/replica_config_test.py::test_replica_num_add[nsDS5ReplicaId-0-65536-9999999999999999999999999999999999999999999999999999999999999999999-invalid-1] | 0.17 | |
No log output captured. | |||
Passed | suites/replication/replica_config_test.py::test_replica_num_add[nsds5ReplicaPurgeDelay--2-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-1] | 0.12 | |
No log output captured. | |||
Passed | suites/replication/replica_config_test.py::test_replica_num_add[nsDS5ReplicaBindDnGroupCheckInterval--2-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-1] | 0.15 | |
No log output captured. | |||
Passed | suites/replication/replica_config_test.py::test_replica_num_add[nsds5ReplicaTombstonePurgeInterval--2-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-1] | 0.15 | |
No log output captured. | |||
Passed | suites/replication/replica_config_test.py::test_replica_num_add[nsds5ReplicaProtocolTimeout--1-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-1] | 0.12 | |
No log output captured. | |||
Passed | suites/replication/replica_config_test.py::test_replica_num_add[nsds5ReplicaReleaseTimeout--1-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-1] | 0.25 | |
No log output captured. | |||
Passed | suites/replication/replica_config_test.py::test_replica_num_add[nsds5ReplicaBackoffMin-0-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-3] | 0.26 | |
No log output captured. | |||
Passed | suites/replication/replica_config_test.py::test_replica_num_add[nsds5ReplicaBackoffMax-0-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-6] | 0.51 | |
No log output captured. | |||
Passed | suites/replication/replica_config_test.py::test_replica_num_modify[nsDS5Flags--1-2-9999999999999999999999999999999999999999999999999999999999999999999-invalid-1] | 0.22 | |
No log output captured. | |||
Passed | suites/replication/replica_config_test.py::test_replica_num_modify[nsds5ReplicaPurgeDelay--2-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-1] | 0.20 | |
No log output captured. | |||
Passed | suites/replication/replica_config_test.py::test_replica_num_modify[nsDS5ReplicaBindDnGroupCheckInterval--2-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-1] | 0.22 | |
No log output captured. | |||
Passed | suites/replication/replica_config_test.py::test_replica_num_modify[nsds5ReplicaTombstonePurgeInterval--2-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-1] | 0.24 | |
No log output captured. | |||
Passed | suites/replication/replica_config_test.py::test_replica_num_modify[nsds5ReplicaProtocolTimeout--1-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-1] | 0.23 | |
No log output captured. | |||
Passed | suites/replication/replica_config_test.py::test_replica_num_modify[nsds5ReplicaReleaseTimeout--1-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-1] | 0.11 | |
No log output captured. | |||
Passed | suites/replication/replica_config_test.py::test_replica_num_modify[nsds5ReplicaBackoffMin-0-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-3] | 0.22 | |
No log output captured. | |||
Passed | suites/replication/replica_config_test.py::test_replica_num_modify[nsds5ReplicaBackoffMax-0-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-6] | 0.11 | |
No log output captured. | |||
Passed | suites/replication/replica_config_test.py::test_same_attr_yields_same_return_code | 0.21 | |
No log output captured. | |||
Passed | suites/replication/ruvstore_test.py::test_ruv_entry_backup | 7.49 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 209d82ff-4034-4049-8119-3374dcff57e7 / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 
Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect ad88e5ba-29d8-4792-b5ea-3cf4a6c9171a / got description=209d82ff-4034-4049-8119-3374dcff57e7) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists ------------------------------Captured stderr call------------------------------ ldiffile: /var/lib/dirsrv/slapd-master1/ldif/master1.ldif -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.replication.ruvstore_test:ruvstore_test.py:102 LDAP operations add, modify, modrdn and delete [32mINFO [0m tests.suites.replication.ruvstore_test:ruvstore_test.py:58 Adding user to master1 [32mINFO [0m tests.suites.replication.ruvstore_test:ruvstore_test.py:61 Modify RDN of user: uid=rep2lusr,ou=People,dc=example,dc=com [32mINFO [0m tests.suites.replication.ruvstore_test:ruvstore_test.py:68 Deleting user: uid=ruvusr,ou=people,dc=example,dc=com [32mINFO [0m tests.suites.replication.ruvstore_test:ruvstore_test.py:106 Stopping the server instance to run db2ldif task to create backup file [32mINFO [0m tests.suites.replication.ruvstore_test:ruvstore_test.py:110 Starting the server after backup [32mINFO [0m tests.suites.replication.ruvstore_test:ruvstore_test.py:113 Checking if backup file contains RUV and required attributes [32mINFO [0m tests.suites.replication.ruvstore_test:ruvstore_test.py:51 Attribute found in RUV: objectClass [32mINFO [0m tests.suites.replication.ruvstore_test:ruvstore_test.py:51 Attribute found in RUV: nsUniqueId [32mINFO [0m tests.suites.replication.ruvstore_test:ruvstore_test.py:51 Attribute found in RUV: nsds50ruv [32mINFO [0m tests.suites.replication.ruvstore_test:ruvstore_test.py:51 Attribute found in RUV: nsruvReplicaLastModified | |||
Passed | suites/replication/series_of_repl_bugs_test.py::test_deletions_are_not_replicated | 10.94 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 4968f58f-40f5-4ff7-9965-32306816172e / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 
Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 1feb52af-857f-4deb-8db4-4a5cd1f16e4d / got description=4968f58f-40f5-4ff7-9965-32306816172e) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 64020a80-dbf2-435d-8972-828ad9aef900 / got description=1feb52af-857f-4deb-8db4-4a5cd1f16e4d) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect b52d707a-0859-45e0-b298-8aa4ae99095c / got description=64020a80-dbf2-435d-8972-828ad9aef900) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working | |||
Passed | suites/replication/series_of_repl_bugs_test.py::test_error_20 | 1.07 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 996e402a-3553-481a-904b-a357c5ecfb90 / got description=b52d707a-0859-45e0-b298-8aa4ae99095c) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working | |||
Passed | suites/replication/series_of_repl_bugs_test.py::test_segfaults | 0.08 | |
No log output captured. | |||
Passed | suites/replication/series_of_repl_bugs_test.py::test_adding_deleting | 0.13 | |
No log output captured. | |||
Passed | suites/replication/series_of_repl_bugs_test.py::test_deleting_twice | 2.15 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect cf6ce9c1-724b-4a55-b3c5-80028e92933c / got description=996e402a-3553-481a-904b-a357c5ecfb90) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 5f3052b5-2832-43fe-bde6-960f928ddfb8 / got description=cf6ce9c1-724b-4a55-b3c5-80028e92933c) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working | |||
Passed | suites/replication/series_of_repl_bugs_test.py::test_rename_entry | 2.33 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect caf67acb-2e6d-47bb-b1e0-732ebdfe5214 / got description=5f3052b5-2832-43fe-bde6-960f928ddfb8) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect ebbca8b8-31d9-40e2-9630-61fb40caff9f / got description=caf67acb-2e6d-47bb-b1e0-732ebdfe5214) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working | |||
Passed | suites/replication/series_of_repl_bugs_test.py::test_userpassword_attribute | 2.14 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 2ff6e9f8-19b8-4991-89b9-6b7bfc00a09c / got description=ebbca8b8-31d9-40e2-9630-61fb40caff9f) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working | |||
Passed | suites/replication/series_of_repl_bugs_test.py::test_tombstone_modrdn | 0.13 | |
No log output captured. | |||
Passed | suites/replication/single_master_test.py::test_mail_attr_repl | 17.48 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for consumer1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39201, 'ldap-secureport': 63901, 'server-id': 'consumer1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:169 Joining consumer consumer1 from master1 ... [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 78dd2342-4db8-4806-9529-5795367efec6 / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is working [32mINFO [0m lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 [32mINFO [0m 
lib389.topologies:topologies.py:174 Ensuring consumer consumer1 from master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 already exists -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.replication.single_master_test:single_master_test.py:67 Check that replication is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect eaad2b52-ef04-48c1-b534-0267f1c5613f / got description=78dd2342-4db8-4806-9529-5795367efec6) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is working [32mINFO [0m tests.suites.replication.single_master_test:single_master_test.py:83 Back up /var/lib/dirsrv/slapd-consumer1/db/userRoot/mail.db to /tmp/mail.db [32mINFO [0m tests.suites.replication.single_master_test:single_master_test.py:87 Remove 'mail' attr from master [32mINFO [0m tests.suites.replication.single_master_test:single_master_test.py:90 Wait for the replication to happen [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 23f0fd9c-c7b8-48aa-8ac8-9de8a5f468a8 / got description=eaad2b52-ef04-48c1-b534-0267f1c5613f) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 23f0fd9c-c7b8-48aa-8ac8-9de8a5f468a8 / got 
description=eaad2b52-ef04-48c1-b534-0267f1c5613f) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 23f0fd9c-c7b8-48aa-8ac8-9de8a5f468a8 / got description=eaad2b52-ef04-48c1-b534-0267f1c5613f) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is working [32mINFO [0m tests.suites.replication.single_master_test:single_master_test.py:94 Restore /tmp/mail.db to /var/lib/dirsrv/slapd-consumer1/db/userRoot/mail.db [32mINFO [0m tests.suites.replication.single_master_test:single_master_test.py:98 Make a search for mail attribute in attempt to crash server [32mINFO [0m tests.suites.replication.single_master_test:single_master_test.py:101 Make sure that server hasn't crashed [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect cc800d22-e52b-4796-896a-33e70c67a3ee / got description=23f0fd9c-c7b8-48aa-8ac8-9de8a5f468a8) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect cc800d22-e52b-4796-896a-33e70c67a3ee / got description=23f0fd9c-c7b8-48aa-8ac8-9de8a5f468a8) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect cc800d22-e52b-4796-896a-33e70c67a3ee / got description=23f0fd9c-c7b8-48aa-8ac8-9de8a5f468a8) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from 
ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect cc800d22-e52b-4796-896a-33e70c67a3ee / got description=23f0fd9c-c7b8-48aa-8ac8-9de8a5f468a8) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is working | |||
Passed | suites/replication/single_master_test.py::test_lastupdate_attr_before_init | 0.29 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38902, 'ldap-secureport': 63602, 'server-id': 'standalone2', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:38901 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:38902 is was created | |||
Passed | suites/replication/tls_client_auth_repl_test.py::test_ssl_transport | 8.27 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 02a3721f-4ee2-4fe3-8455-21a3cfab59cd / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 
Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect eb966d5c-86fd-4d76-8c4a-95873a0ad559 / got description=02a3721f-4ee2-4fe3-8455-21a3cfab59cd) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 is NOT working (expect 32691533-4619-450f-9aba-627055337a19 / got description=eb966d5c-86fd-4d76-8c4a-95873a0ad559) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 is NOT working (expect 041f030c-7efb-4234-a11c-f2ad4b184f46 / got description=32691533-4619-450f-9aba-627055337a19) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 is NOT working (expect 1dd664f5-503c-4723-b3ae-96c8f787b1c4 / got description=041f030c-7efb-4234-a11c-f2ad4b184f46) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 is working -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 to 
ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 is NOT working (expect c62706c0-b24b-4794-ab9e-102b5447dc34 / got description=1dd664f5-503c-4723-b3ae-96c8f787b1c4) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 is NOT working (expect e387805a-19aa-4da7-84ae-35b287ad03e7 / got description=c62706c0-b24b-4794-ab9e-102b5447dc34) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 is NOT working (expect 96086d7c-12c5-43af-9e67-18412d4f55c1 / got description=e387805a-19aa-4da7-84ae-35b287ad03e7) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 is NOT working (expect 9d0c3958-c288-46c2-860c-72784aaf26e0 / got description=96086d7c-12c5-43af-9e67-18412d4f55c1) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 is working [32mINFO [0m 
lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 is NOT working (expect 2b30a440-2cd1-4f27-9436-30f5bae95cb7 / got description=9d0c3958-c288-46c2-860c-72784aaf26e0) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 is NOT working (expect a085be48-8682-41f0-b09f-7ed304818e0d / got description=2b30a440-2cd1-4f27-9436-30f5bae95cb7) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 is NOT working (expect a4c3975a-4559-47bc-bd37-929f06bed65e / got description=a085be48-8682-41f0-b09f-7ed304818e0d) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 is NOT working (expect b5ddd3a1-104c-4b10-b54b-c091dce613c7 / got description=a4c3975a-4559-47bc-bd37-929f06bed65e) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from 
ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 is working | |||
Passed | suites/replication/tombstone_fixup_test.py::test_precise_tombstone_purging | 24.45 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:tasks.py:567 Export task export_11092020_230717 for file /var/lib/dirsrv/slapd-master1/ldif/export.ldif completed successfully [32mINFO [0m lib389.utils:tombstone_fixup_test.py:77 Import replication LDIF file... [32mINFO [0m lib389:tasks.py:498 Import task import_11092020_230720 for file /var/lib/dirsrv/slapd-master1/ldif/export.ldif completed successfully [32mINFO [0m lib389:tasks.py:937 tombstone fixup task fixupTombstone_11092020_230724 for backend userRoot completed successfully [32mINFO [0m lib389:tasks.py:937 tombstone fixup task fixupTombstone_11092020_230726 for backend userRoot completed successfully [32mINFO [0m lib389.utils:tombstone_fixup_test.py:116 Wait for tombstone purge interval to pass... [32mINFO [0m lib389.utils:tombstone_fixup_test.py:123 Wait for tombstone purge interval to pass again... | |||
Passed | suites/replication/tombstone_test.py::test_purge_success | 0.40 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. -------------------------------Captured log call-------------------------------- [32mINFO [0m Tombstone:tombstone.py:165 Reviving nsuniqueid=4f2e1f86-230a11eb-910ee7f2-633b8d0e,uid=testuser,ou=people,dc=example,dc=com -> uid=testuser,ou=people,dc=example,dc=com | |||
Passed | suites/replication/wait_for_async_feature_test.py::test_not_int_value | 0.01 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 8f7aa738-6668-43cd-ae92-3123b7d28701 / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 
Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 4acd6b56-36ea-4b0a-9f02-3feae048f42f / got description=8f7aa738-6668-43cd-ae92-3123b7d28701) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists | |||
Passed | suites/replication/wait_for_async_feature_test.py::test_multi_value | 0.09 | |
No log output captured. | |||
Passed | suites/replication/wait_for_async_feature_test.py::test_value_check[waitfor_async_attr0] | 0.01 | |
No log output captured. | |||
Passed | suites/replication/wait_for_async_feature_test.py::test_value_check[waitfor_async_attr1] | 0.01 | |
No log output captured. | |||
Passed | suites/replication/wait_for_async_feature_test.py::test_value_check[waitfor_async_attr2] | 0.01 | |
No log output captured. | |||
Passed | suites/replication/wait_for_async_feature_test.py::test_value_check[waitfor_async_attr3] | 0.01 | |
No log output captured. | |||
Passed | suites/replication/wait_for_async_feature_test.py::test_behavior_with_value[waitfor_async_attr0] | 20.04 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:63 Add 100 nested entries under replicated suffix on master1 [32mINFO [0m tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:71 Delete created entries -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:170 Set Replication Debugging loglevel for the errorlog [32mINFO [0m tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:180 Gather all sync attempts within Counter dict, group by timestamp [32mINFO [0m tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:201 Take the most common timestamp and assert it has appeared in the range from 4 to 11 times [35mDEBUG [0m tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:204 4 <= 9 <= 11 | |||
Passed | suites/replication/wait_for_async_feature_test.py::test_behavior_with_value[waitfor_async_attr1] | 20.08 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:63 Add 100 nested entries under replicated suffix on master1 [32mINFO [0m tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:71 Delete created entries -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:170 Set Replication Debugging loglevel for the errorlog [32mINFO [0m tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:180 Gather all sync attempts within Counter dict, group by timestamp [32mINFO [0m tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:201 Take the most common timestamp and assert it has appeared in the range from 0 to 2 times [35mDEBUG [0m tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:204 0 <= 1 <= 2 | |||
Passed | suites/replication/wait_for_async_feature_test.py::test_behavior_with_value[waitfor_async_attr2] | 20.25 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:63 Add 100 nested entries under replicated suffix on master1 [32mINFO [0m tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:71 Delete created entries -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:170 Set Replication Debugging loglevel for the errorlog [32mINFO [0m tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:180 Gather all sync attempts within Counter dict, group by timestamp [32mINFO [0m tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:201 Take the most common timestamp and assert it has appeared in the range from 4 to 11 times [35mDEBUG [0m tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:204 4 <= 8 <= 11 | |||
Passed | suites/replication/wait_for_async_feature_test.py::test_behavior_with_value[waitfor_async_attr3] | 20.26 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:63 Add 100 nested entries under replicated suffix on master1 [32mINFO [0m tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:71 Delete created entries -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:170 Set Replication Debugging loglevel for the errorlog [32mINFO [0m tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:180 Gather all sync attempts within Counter dict, group by timestamp [32mINFO [0m tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:201 Take the most common timestamp and assert it has appeared in the range from 4 to 11 times [35mDEBUG [0m tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:204 4 <= 7 <= 11 | |||
Passed | suites/resource_limits/fdlimits_test.py::test_fd_limits | 0.07 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.resource_limits.fdlimits_test:fdlimits_test.py:69 Test PASSED | |||
Passed | suites/rewriters/adfilter_test.py::test_adfilter_objectCategory | 6.16 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/rewriters/basic_test.py::test_rewriters_container | 0.01 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/rewriters/basic_test.py::test_foo_filter_rewriter | 4.67 | |
No log output captured. | |||
Passed | suites/roles/basic_test.py::test_filterrole | 0.40 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/roles/basic_test.py::test_managedrole | 0.28 | |
No log output captured. | |||
Passed | suites/roles/basic_test.py::test_nestedrole | 0.35 | |
No log output captured. | |||
Passed | suites/sasl/allowed_mechs_test.py::test_basic_feature | 41.79 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. ------------------------------Captured stdout call------------------------------ ['EXTERNAL', 'GSS-SPNEGO', 'GSSAPI', 'DIGEST-MD5', 'CRAM-MD5', 'LOGIN', 'PLAIN', 'ANONYMOUS'] -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:allowed_mechs_test.py:75 Test we have some of the default mechanisms [32mINFO [0m lib389:allowed_mechs_test.py:83 Edit mechanisms to allow just PLAIN [32mINFO [0m lib389:allowed_mechs_test.py:91 Restart server and make sure we still have correct allowed mechs [32mINFO [0m lib389:allowed_mechs_test.py:100 Edit mechanisms to allow just PLAIN and EXTERNAL [32mINFO [0m lib389:allowed_mechs_test.py:108 Edit mechanisms to allow just PLAIN and GSSAPI [32mINFO [0m lib389:allowed_mechs_test.py:126 Edit mechanisms to allow just PLAIN, GSSAPI, and ANONYMOUS [32mINFO [0m lib389:allowed_mechs_test.py:146 Edit mechanisms to allow just PLAIN and ANONYMOUS [32mINFO [0m lib389:allowed_mechs_test.py:165 Reset allowed mechaisms [32mINFO [0m lib389:allowed_mechs_test.py:169 Check that we have the original set of mechanisms [32mINFO [0m lib389:allowed_mechs_test.py:174 Check that we have the original set of mechanisms after a restart | |||
Passed | suites/sasl/allowed_mechs_test.py::test_config_set_few_mechs | 0.01 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:allowed_mechs_test.py:198 Set nsslapd-allowed-sasl-mechanisms to 'PLAIN GSSAPI' [32mINFO [0m lib389:allowed_mechs_test.py:201 Verify nsslapd-allowed-sasl-mechanisms has the values | |||
Passed | suites/sasl/plain_test.py::test_basic_feature | 12.15 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/sasl/regression_test.py::test_openldap_no_nss_crypto | 40.09 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 6a255cd5-c959-412d-bd07-a1b1ddd5c124 / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 
Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 315e1ed8-9371-46a1-a9e2-e5eacd7fbd3a / got description=6a255cd5-c959-412d-bd07-a1b1ddd5c124) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.sasl.regression_test:regression_test.py:133 Ticket 47536 - Allow usage of OpenLDAP libraries that don't use NSS for crypto [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 is NOT working (expect 4ef63674-99f6-4573-8eb2-4be02f7e3c63 / got description=315e1ed8-9371-46a1-a9e2-e5eacd7fbd3a) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 is working [32mINFO [0m tests.suites.sasl.regression_test:regression_test.py:35 ######################### Adding 5 entries to master1 ###################### [32mINFO [0m tests.suites.sasl.regression_test:regression_test.py:35 ######################### Adding 5 entries to master2 ###################### [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 is NOT working (expect c31cfc38-96f7-4214-9fd2-729da096837c / got description=4ef63674-99f6-4573-8eb2-4be02f7e3c63) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 is NOT working (expect 
6d9acd50-abbe-4184-910a-b4bfef7dafb6 / got description=c31cfc38-96f7-4214-9fd2-729da096837c) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 is working [32mINFO [0m tests.suites.sasl.regression_test:regression_test.py:146 ##### Searching for entries on master1... [32mINFO [0m tests.suites.sasl.regression_test:regression_test.py:150 ##### Searching for entries on master2... [32mINFO [0m tests.suites.sasl.regression_test:regression_test.py:92 ######################### Relocate PEM files on master1 ###################### [32mINFO [0m tests.suites.sasl.regression_test:regression_test.py:100 ##### restart master1 [32mINFO [0m tests.suites.sasl.regression_test:regression_test.py:47 ######################### Check PEM files (/dev/shm/MyCA, /dev/shm/MyServerCert1, /dev/shm/MyServerKey1) in /dev/shm ###################### [32mINFO [0m tests.suites.sasl.regression_test:regression_test.py:53 /dev/shm/MyCA.pem is successfully generated. [32mINFO [0m tests.suites.sasl.regression_test:regression_test.py:66 /dev/shm/MyServerCert1.pem is successfully generated. [32mINFO [0m tests.suites.sasl.regression_test:regression_test.py:79 /dev/shm/MyServerKey1.pem is successfully generated. 
[32mINFO [0m tests.suites.sasl.regression_test:regression_test.py:35 ######################### Adding 5 entries to master1 ###################### [32mINFO [0m tests.suites.sasl.regression_test:regression_test.py:35 ######################### Adding 5 entries to master2 ###################### [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 is NOT working (expect 537a38c8-c2d6-41ec-89bf-926e2c26e335 / got description=6d9acd50-abbe-4184-910a-b4bfef7dafb6) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 is NOT working (expect 18f29c0a-9c3c-4a3e-a585-cd0e67e18cea / got description=537a38c8-c2d6-41ec-89bf-926e2c26e335) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 is NOT working (expect 18f29c0a-9c3c-4a3e-a585-cd0e67e18cea / got description=537a38c8-c2d6-41ec-89bf-926e2c26e335) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701 is working [32mINFO [0m tests.suites.sasl.regression_test:regression_test.py:162 ##### Searching for entries on master1... [32mINFO [0m tests.suites.sasl.regression_test:regression_test.py:166 ##### Searching for entries on master2... 
[32mINFO [0m lib389:tasks.py:567 Export task export_11092020_231609 for file /var/lib/dirsrv/slapd-master1/ldif/master1.ldif completed successfully [32mINFO [0m tests.suites.sasl.regression_test:regression_test.py:173 Ticket 47536 - PASSED | |||
Passed | suites/schema/eduperson_test.py::test_account_locking | 0.38 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.schema.eduperson_test:eduperson_test.py:88 Test PASSED | |||
Passed | suites/schema/schema_reload_test.py::test_schema_reload_with_searches | 1.28 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.schema.schema_reload_test:schema_reload_test.py:49 Test the searches still work as expected during schema reload tasks | |||
Passed | suites/schema/schema_reload_test.py::test_invalid_schema | 2.18 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.schema.schema_reload_test:schema_reload_test.py:234 Test schema-reload task with invalid schema [32mINFO [0m tests.suites.schema.schema_reload_test:schema_reload_test.py:237 Create valid schema file (98user.ldif)... [32mINFO [0m tests.suites.schema.schema_reload_test:schema_reload_test.py:252 Create invalid schema file (99user.ldif)... [32mINFO [0m tests.suites.schema.schema_reload_test:schema_reload_test.py:269 Run the schema-reload task, it should fail... [32mINFO [0m tests.suites.schema.schema_reload_test:schema_reload_test.py:276 Check cn=schema to verify the invalid schema was not added [32mINFO [0m tests.suites.schema.schema_reload_test:schema_reload_test.py:283 The invalid schema is not present on the server | |||
Passed | suites/schema/schema_replication_test.py::test_schema_replication_one | 9.47 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for consumer1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39201, 'ldap-secureport': 63901, 'server-id': 'consumer1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:169 Joining consumer consumer1 from master1 ... [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect f03ca688-8bf9-48ac-917f-03c9f3fc8eba / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is working [32mINFO [0m lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 [32mINFO [0m 
lib389.topologies:topologies.py:174 Ensuring consumer consumer1 from master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 already exists [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:182 test_schema_replication_init topology_m1c1 <lib389.topologies.TopologyMain object at 0x7faa45f95790> (master <lib389.DirSrv object at 0x7faa54fdde80>, consumer <lib389.DirSrv object at 0x7faa54fddfa0> -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:schema_replication_test.py:41 ############################################### [32mINFO [0m lib389:schema_replication_test.py:42 ####### [32mINFO [0m lib389:schema_replication_test.py:43 ####### Extra OC Schema is pushed - no error [32mINFO [0m lib389:schema_replication_test.py:44 ####### [32mINFO [0m lib389:schema_replication_test.py:45 ################################################### [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:222 test_schema_replication_one topology_m1c1 <lib389.topologies.TopologyMain object at 0x7faa45f95790> (master <lib389.DirSrv object at 0x7faa54fdde80>, consumer <lib389.DirSrv object at 0x7faa54fddfa0> [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive 0 (expected 1) [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive b'1' (expected 2) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:233 test_schema_replication_one 
master_schema_csn=b'5faa1452000000000000' [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:234 ctest_schema_replication_one onsumer_schema_csn=b'5faa1452000000000000' [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:55 _pattern_errorlog: start at offset 0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [36] 389-Directory/2.0.1 B2020.315.0017 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [93] localhost.localdomain:39001 (/etc/dirsrv/slapd-master1) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [94] [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [196] [09/Nov/2020:23:16:55.782926758 -0500] - INFO - main - 389-Directory/2.0.1 B2020.315.0017 starting up [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [304] [09/Nov/2020:23:16:55.787945526 -0500] - INFO - main - Setting the maximum file descriptor limit to: 524288 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [396] [09/Nov/2020:23:16:55.800464941 -0500] - ERR - allow_operation - Component identity is NULL [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [504] [09/Nov/2020:23:16:56.889935158 -0500] - INFO - PBKDF2_SHA256 - Based on CPU performance, chose 2048 rounds [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [618] [09/Nov/2020:23:16:56.897671364 -0500] - INFO - bdb_config_upgrade_dse_info - create config entry from old config [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [720] [09/Nov/2020:23:16:56.908757002 -0500] - 
NOTICE - bdb_start_autotune - found 7980860k physical memory [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [816] [09/Nov/2020:23:16:56.914092646 -0500] - NOTICE - bdb_start_autotune - found 7303596k available [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [923] [09/Nov/2020:23:16:56.919484209 -0500] - NOTICE - bdb_start_autotune - cache autosizing: db cache: 498803k [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [1026] [09/Nov/2020:23:16:56.923635293 -0500] - NOTICE - bdb_start_autotune - total cache size: 408620032 B; [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [1162] [09/Nov/2020:23:16:57.232903481 -0500] - INFO - slapd_daemon - slapd started. Listening on All Interfaces port 39001 for LDAP requests [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [1287] [09/Nov/2020:23:16:57.239172597 -0500] - INFO - slapd_daemon - Listening on /var/run/slapd-master1.socket for LDAPI requests [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [1446] [09/Nov/2020:23:16:57.278573326 -0500] - INFO - postop_modify_config_dse - The change of nsslapd-securePort will not take effect until the server is restarted [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [1563] [09/Nov/2020:23:16:57.291923574 -0500] - INFO - ldbm_instance_config_cachememsize_set - force a minimal value 512000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [1741] [09/Nov/2020:23:16:57.912054557 -0500] - INFO - op_thread_cleanup - slapd shutting down - signaling operation threads - op stack size 2 max work q size 2 max work q stack size 2 [35mDEBUG 
[0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [1860] [09/Nov/2020:23:16:57.921090340 -0500] - INFO - slapd_daemon - slapd shutting down - waiting for 1 thread to terminate [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [1990] [09/Nov/2020:23:16:57.941312744 -0500] - INFO - slapd_daemon - slapd shutting down - closing down internal subsystems and plugins [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [2093] [09/Nov/2020:23:16:57.951607099 -0500] - INFO - bdb_pre_close - Waiting for 4 database threads to stop [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [2190] [09/Nov/2020:23:16:59.662235988 -0500] - INFO - bdb_pre_close - All database threads now stopped [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [2301] [09/Nov/2020:23:16:59.697288106 -0500] - INFO - ldbm_back_instance_set_destructor - Set of instances destroyed [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [2462] [09/Nov/2020:23:16:59.704348310 -0500] - INFO - connection_post_shutdown_cleanup - slapd shutting down - freed 2 work q stack objects - freed 2 op stack objects [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [2532] [09/Nov/2020:23:16:59.711894817 -0500] - INFO - main - slapd stopped. 
[35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [2634] [09/Nov/2020:23:17:01.093647249 -0500] - INFO - main - 389-Directory/2.0.1 B2020.315.0017 starting up [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [2742] [09/Nov/2020:23:17:01.100842623 -0500] - INFO - main - Setting the maximum file descriptor limit to: 524288 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [2834] [09/Nov/2020:23:17:01.115793677 -0500] - ERR - allow_operation - Component identity is NULL [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [2942] [09/Nov/2020:23:17:02.251603389 -0500] - INFO - PBKDF2_SHA256 - Based on CPU performance, chose 2048 rounds [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [3059] [09/Nov/2020:23:17:02.260154754 -0500] - INFO - ldbm_instance_config_cachememsize_set - force a minimal value 512000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [3161] [09/Nov/2020:23:17:02.270428271 -0500] - NOTICE - bdb_start_autotune - found 7980860k physical memory [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [3257] [09/Nov/2020:23:17:02.275275249 -0500] - NOTICE - bdb_start_autotune - found 7303664k available [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [3364] [09/Nov/2020:23:17:02.278205753 -0500] - NOTICE - bdb_start_autotune - cache autosizing: db cache: 498803k [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [3494] [09/Nov/2020:23:17:02.281055435 -0500] - NOTICE - bdb_start_autotune - cache autosizing: userRoot entry cache (1 total): 1376256k [35mDEBUG 
[0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [3620] [09/Nov/2020:23:17:02.288190952 -0500] - NOTICE - bdb_start_autotune - cache autosizing: userRoot dn cache (1 total): 196608k [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [3724] [09/Nov/2020:23:17:02.292369976 -0500] - NOTICE - bdb_start_autotune - total cache size: 1834683392 B; [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [3860] [09/Nov/2020:23:17:02.400525423 -0500] - INFO - slapd_daemon - slapd started. Listening on All Interfaces port 39001 for LDAP requests [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [3985] [09/Nov/2020:23:17:02.407528703 -0500] - INFO - slapd_daemon - Listening on /var/run/slapd-master1.socket for LDAPI requests [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [4173] [09/Nov/2020:23:17:14.535740810 -0500] - NOTICE - NSMMReplicationPlugin - changelog program - _cl5ConstructRUV - Rebuilding the replication changelog RUV, this may take several minutes... [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [4353] [09/Nov/2020:23:17:14.544702578 -0500] - NOTICE - NSMMReplicationPlugin - changelog program - _cl5ConstructRUV - Rebuilding replication changelog RUV complete. Result 0 (Success) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [4541] [09/Nov/2020:23:17:14.553016610 -0500] - NOTICE - NSMMReplicationPlugin - changelog program - _cl5ConstructRUV - Rebuilding the replication changelog RUV, this may take several minutes... 
[35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [4721] [09/Nov/2020:23:17:14.558756082 -0500] - NOTICE - NSMMReplicationPlugin - changelog program - _cl5ConstructRUV - Rebuilding replication changelog RUV complete. Result 0 (Success) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [5010] [09/Nov/2020:23:17:14.903960930 -0500] - WARN - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=temp_201" (ci-vm-10-0-139-53:39201): The remote replica has a different database generation ID than the local database. You may have to reinitialize the remote replica, or the local replica. [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [5180] [09/Nov/2020:23:17:15.301927404 -0500] - INFO - NSMMReplicationPlugin - repl5_tot_run - Beginning total update of replica "agmt="cn=temp_201" (ci-vm-10-0-139-53:39201)". [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [5364] [09/Nov/2020:23:17:15.306313130 -0500] - NOTICE - NSMMReplicationPlugin - replica_subentry_check - Need to create replication keep alive entry <cn=repl keep alive 1,dc=example,dc=com> [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [5509] [09/Nov/2020:23:17:15.309893933 -0500] - INFO - NSMMReplicationPlugin - replica_subentry_create - add dn: cn=repl keep alive 1,dc=example,dc=com [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [5526] objectclass: top [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [5552] objectclass: ldapsubentry [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [5582] objectclass: extensibleObject [35mDEBUG [0m 
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [5604] cn: repl keep alive 1 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [5790] [09/Nov/2020:23:17:18.407531657 -0500] - INFO - NSMMReplicationPlugin - repl5_tot_run - Finished total update of replica "agmt="cn=temp_201" (ci-vm-10-0-139-53:39201)". Sent 16 entries. [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [5790] [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:67 _pattern_errorlog: end at offset 5790 | |||
Passed | suites/schema/schema_replication_test.py::test_schema_replication_two | 11.68 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:schema_replication_test.py:41 ############################################### [32mINFO [0m lib389:schema_replication_test.py:42 ####### [32mINFO [0m lib389:schema_replication_test.py:43 ####### Extra OC Schema is pushed - (ticket 47721 allows to learn missing def) [32mINFO [0m lib389:schema_replication_test.py:44 ####### [32mINFO [0m lib389:schema_replication_test.py:45 ################################################### [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive b'2' (expected 3) [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive b'3' (expected 4) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:289 test_schema_replication_two master_schema_csn=b'5faa145e000000000000' [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:290 test_schema_replication_two consumer_schema_csn=b'5faa145e000000000000' [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:55 _pattern_errorlog: start at offset 5791 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [6003] 09/Nov/2020:23:17:34.735958376 -0500] - ERR - NSMMReplicationPlugin - update_consumer_schema - [S] Schema agmt="cn=201" (ci-vm-10-0-139-53:39201) must not be overwritten (set replication log for additional info) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:67 _pattern_errorlog: end at offset 6003 | |||
Passed | suites/schema/schema_replication_test.py::test_schema_replication_three | 9.49 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:schema_replication_test.py:41 ############################################### [32mINFO [0m lib389:schema_replication_test.py:42 ####### [32mINFO [0m lib389:schema_replication_test.py:43 ####### Extra OC Schema is pushed - no error [32mINFO [0m lib389:schema_replication_test.py:44 ####### [32mINFO [0m lib389:schema_replication_test.py:45 ################################################### [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive b'4' (expected 5) [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive b'5' (expected 6) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:340 test_schema_replication_three master_schema_csn=b'5faa1468000000000000' [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:341 test_schema_replication_three consumer_schema_csn=b'5faa1468000000000000' [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:55 _pattern_errorlog: start at offset 6004 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [6004] [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:67 _pattern_errorlog: end at offset 6004 | |||
Passed | suites/schema/schema_replication_test.py::test_schema_replication_four | 9.58 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:schema_replication_test.py:41 ############################################### [32mINFO [0m lib389:schema_replication_test.py:42 ####### [32mINFO [0m lib389:schema_replication_test.py:43 ####### Same OC - extra MUST: Schema is pushed - no error [32mINFO [0m lib389:schema_replication_test.py:44 ####### [32mINFO [0m lib389:schema_replication_test.py:45 ################################################### [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive b'6' (expected 7) [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive b'7' (expected 8) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:389 test_schema_replication_four master_schema_csn=b'5faa1471000000000000' [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:390 ctest_schema_replication_four onsumer_schema_csn=b'5faa1471000000000000' [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:55 _pattern_errorlog: start at offset 6005 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [6005] [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:67 _pattern_errorlog: end at offset 6005 | |||
Passed | suites/schema/schema_replication_test.py::test_schema_replication_five | 13.00 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:schema_replication_test.py:41 ############################################### [32mINFO [0m lib389:schema_replication_test.py:42 ####### [32mINFO [0m lib389:schema_replication_test.py:43 ####### Same OC - extra MUST: Schema is pushed - (fix for 47721) [32mINFO [0m lib389:schema_replication_test.py:44 ####### [32mINFO [0m lib389:schema_replication_test.py:45 ################################################### [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive b'8' (expected 9) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive b'8' (expected 9) [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive b'9' (expected 10) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:452 test_schema_replication_five master_schema_csn=b'5faa147e000000000000' [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:453 ctest_schema_replication_five onsumer_schema_csn=b'5faa147e000000000000' [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:55 _pattern_errorlog: start at offset 6006 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [6111] /Nov/2020:23:18:05.888270816 -0500] - DEBUG - replication - copy_operation_parameters - replica is null. 
[35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [6233] [09/Nov/2020:23:18:05.943517157 -0500] - DEBUG - _csngen_adjust_local_time - gen state before 5faa147a0002:1604981882:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [6354] [09/Nov/2020:23:18:05.951964632 -0500] - DEBUG - _csngen_adjust_local_time - gen state after 5faa147d0000:1604981885:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [6517] [09/Nov/2020:23:18:05.956709449 -0500] - DEBUG - NSMMReplicationPlugin - ruv_add_csn_inprogress - Successfully inserted csn 5faa147d000000010000 into pending list [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [6697] [09/Nov/2020:23:18:05.960898025 -0500] - DEBUG - NSMMReplicationPlugin - purge_entry_state_information - From entry cn=test_entry,dc=example,dc=com up to CSN 5fa0d9fa000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [6942] [09/Nov/2020:23:18:05.968517047 -0500] - DEBUG - NSMMReplicationPlugin - write_changelog_and_ruv - Writing change for cn=test_entry,dc=example,dc=com (uniqid: 9537b309-230b11eb-94b3c8a1-8c2d92e7, optype: 8) to changelog csn 5faa147d000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [7117] [09/Nov/2020:23:18:05.975065132 -0500] - DEBUG - NSMMReplicationPlugin - changelog program - cl5WriteOperationTxn - Successfully written entry with csn (5faa147d000000010000) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [7255] [09/Nov/2020:23:18:05.979746108 -0500] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: committing all csns for csn 5faa147d000000010000 [35mDEBUG [0m 
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [7385] [09/Nov/2020:23:18:05.984412097 -0500] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: processing data csn 5faa147d000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [7523] [09/Nov/2020:23:18:05.988674458 -0500] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Successfully committed csn 5faa147d000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [7651] [09/Nov/2020:23:18:06.000432294 -0500] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Rolled up to csn 5faa147d000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [7766] [09/Nov/2020:23:18:06.006835927 -0500] - DEBUG - replication - multimaster_mmr_postop - error 0 for operation 561. [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [7940] [09/Nov/2020:23:18:06.015974642 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: wait_for_changes -> wait_for_changes [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [8122] [09/Nov/2020:23:18:06.020767485 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: wait_for_changes -> ready_to_acquire_replica [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [8293] [09/Nov/2020:23:18:06.028585654 -0500] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-139-53:39201) - Canceling linger on the connection [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [8415] [09/Nov/2020:23:18:06.036447741 -0500] - DEBUG - 
_csngen_adjust_local_time - gen state before 5faa147d0001:1604981885:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [8536] [09/Nov/2020:23:18:06.040377450 -0500] - DEBUG - _csngen_adjust_local_time - gen state after 5faa147e0000:1604981886:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [8703] [09/Nov/2020:23:18:06.046425821 -0500] - DEBUG - NSMMReplicationPlugin - acquire_replica - agmt="cn=201" (ci-vm-10-0-139-53:39201): Replica was successfully acquired. [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [8884] [09/Nov/2020:23:18:06.051832304 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: ready_to_acquire_replica -> sending_updates [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [9068] [09/Nov/2020:23:18:06.055690799 -0500] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Checking consumer schema localcsn:5faa147d000000000000 / remotecsn:5faa1471000000000000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [9232] [09/Nov/2020:23:18:06.229574210 -0500] - DEBUG - schema_oc_compare_strict - Attribute telexNumber is not required in 'consumerNewOCA' of the remote consumer schema [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [9386] [09/Nov/2020:23:18:06.233156742 -0500] - DEBUG - schema_oc_superset_check - Remote consumerNewOCA schema objectclasses is a superset of the received one. 
[35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [9550] [09/Nov/2020:23:18:06.241826337 -0500] - DEBUG - schema_oc_compare_strict - Attribute telexNumber is not required in 'consumerNewOCA' of the remote consumer schema [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [9700] [09/Nov/2020:23:18:06.253198147 -0500] - DEBUG - schema_list_oc2learn - Add that unknown/extended objectclass consumerNewOCA (1.2.3.4.5.6.7.8.9.10.1) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [9986] [09/Nov/2020:23:18:06.261360466 -0500] - DEBUG - schema_oc_to_string - Replace (old[251]=( 1.2.3.4.5.6.7.8.9.10.1 NAME 'consumerNewOCA' DESC 'To test ticket 47490' SUP 'person' AUXILIARY MUST ( postalAddress $ preferredLocale ) MAY ( postalCode $ street ) X-ORIGIN 'blahblahblah' )) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [10299] [09/Nov/2020:23:18:06.265441159 -0500] - DEBUG - supplier_get_new_definitions - supplier takes objectclass: ( 1.2.3.4.5.6.7.8.9.10.1 NAME 'consumerNewOCA' DESC 'To test ticket 47490' SUP person AUXILIARY MUST ( postalAddress $ preferredLocale $ telexNumber ) MAY ( postalCode $ street ) X-ORIGIN 'user defined' ) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [10601] [09/Nov/2020:23:18:06.291272070 -0500] - DEBUG - modify_schema_prepare_mods - MOD[1] del (objectclasses): ( 1.2.3.4.5.6.7.8.9.10.1 NAME 'consumerNewOCA' DESC 'To test ticket 47490' SUP 'person' AUXILIARY MUST ( postalAddress $ preferredLocale ) MAY ( postalCode $ street ) X-ORIGIN 'blahblahblah' ) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [10912] [09/Nov/2020:23:18:06.298419807 -0500] - DEBUG - modify_schema_prepare_mods - MOD[0] add 
(objectclasses): ( 1.2.3.4.5.6.7.8.9.10.1 NAME 'consumerNewOCA' DESC 'To test ticket 47490' SUP person AUXILIARY MUST ( postalAddress $ preferredLocale $ telexNumber ) MAY ( postalCode $ street ) X-ORIGIN 'user defined' ) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [11020] [09/Nov/2020:23:18:06.308475920 -0500] - DEBUG - replication - copy_operation_parameters - replica is null. [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [11143] [09/Nov/2020:23:18:06.315482060 -0500] - DEBUG - modify_schema_internal_mod - Successfully learn objectclasses definitions [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [11356] [09/Nov/2020:23:18:06.319700128 -0500] - ERR - NSMMReplicationPlugin - update_consumer_schema - [S] Schema agmt="cn=201" (ci-vm-10-0-139-53:39201) must not be overwritten (set replication log for additional info) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:67 _pattern_errorlog: end at offset 11356 | |||
Passed | suites/schema/schema_replication_test.py::test_schema_replication_six | 9.81 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:schema_replication_test.py:41 ############################################### [32mINFO [0m lib389:schema_replication_test.py:42 ####### [32mINFO [0m lib389:schema_replication_test.py:43 ####### Same OC - extra MUST: Schema is pushed - no error [32mINFO [0m lib389:schema_replication_test.py:44 ####### [32mINFO [0m lib389:schema_replication_test.py:45 ################################################### [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive b'10' (expected 11) [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive b'11' (expected 12) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:508 test_schema_replication_six master_schema_csn=b'5faa1488000000000000' [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:509 ctest_schema_replication_six onsumer_schema_csn=b'5faa1488000000000000' [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:55 _pattern_errorlog: start at offset 11357 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [11493] 09/Nov/2020:23:18:06.323856485 -0500] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] schema definitions may have been learned [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [11627] [09/Nov/2020:23:18:06.620990011 -0500] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Reread remotecsn:5faa147b000000000000 [35mDEBUG [0m 
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [11811] [09/Nov/2020:23:18:06.626803466 -0500] - DEBUG - NSMMReplicationPlugin - conn_push_schema - Schema checking successful: ok to push the schema (agmt="cn=201" (ci-vm-10-0-139-53:39201)) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [11926] [09/Nov/2020:23:18:06.975445509 -0500] - DEBUG - csngen_adjust_time - gen state before 5faa147e0001:1604981886:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [12048] [09/Nov/2020:23:18:07.025075971 -0500] - DEBUG - _csngen_adjust_local_time - gen state before 5faa147e0001:1604981886:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [12169] [09/Nov/2020:23:18:07.032535746 -0500] - DEBUG - _csngen_adjust_local_time - gen state after 5faa147f0000:1604981887:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [12349] [09/Nov/2020:23:18:07.040722343 -0500] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-53:39201)): Consumer RUV: [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [12504] [09/Nov/2020:23:18:07.049499729 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replicageneration} 5faa144a000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [12716] [09/Nov/2020:23:18:07.060452617 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replica 1 ldap://localhost.localdomain:39001} 5faa144a000100010000 5faa147a000000010000 00000000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [12896] 
[09/Nov/2020:23:18:07.064551112 -0500] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-53:39201)): Supplier RUV: [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [13051] [09/Nov/2020:23:18:07.068280906 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replicageneration} 5faa144a000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [13263] [09/Nov/2020:23:18:07.072224735 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replica 1 ldap://localhost.localdomain:39001} 5faa144a000100010000 5faa147d000000010000 5faa147e [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [13424] [09/Nov/2020:23:18:07.077107739 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_get_buffer - found thread private buffer cache 0x7f29bb5b8700 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [13650] [09/Nov/2020:23:18:07.081112453 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_get_buffer - _pool is 0x7f29e847abe0 _pool->pl_busy_lists is 0x7f29bb603ae0 _pool->pl_busy_lists->bl_buffers is 0x7f29bb5b8700 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [13944] [09/Nov/2020:23:18:07.084434868 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_initial_anchorcsn - agmt="cn=201" (ci-vm-10-0-139-53:39201) - (cscb 0 - state 0) - csnPrevMax () csnMax (5faa147d000000010000) csnBuf (5faa147a000000010000) csnConsumerMax (5faa147a000000010000) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [14057] [09/Nov/2020:23:18:07.088496530 -0500] - DEBUG - clcache_initial_anchorcsn - 
anchor is now: 5faa147a000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [14247] [09/Nov/2020:23:18:07.094023730 -0500] - DEBUG - NSMMReplicationPlugin - changelog program - agmt="cn=201" (ci-vm-10-0-139-53:39201): CSN 5faa147a000000010000 found, position set for replay [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [14402] [09/Nov/2020:23:18:07.098209295 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_get_next_change - load=1 rec=1 csn=5faa147d000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [14621] [09/Nov/2020:23:18:07.101590330 -0500] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-139-53:39201): Sending modify operation (dn="cn=test_entry,dc=example,dc=com" csn=5faa147d000000010000) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [14733] [09/Nov/2020:23:18:07.106366636 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Starting [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [14930] [09/Nov/2020:23:18:07.111700513 -0500] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-139-53:39201): Consumer successfully sent operation with csn 5faa147d000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [15243] [09/Nov/2020:23:18:07.120166779 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_adjust_anchorcsn - agmt="cn=201" (ci-vm-10-0-139-53:39201) - (cscb 0 - state 1) - csnPrevMax (5faa147d000000010000) csnMax (5faa147d000000010000) csnBuf (5faa147d000000010000) csnConsumerMax (5faa147d000000010000) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 
_pattern_errorlog: [15366] [09/Nov/2020:23:18:07.131441797 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_load_buffer - rc=-30988 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [15549] [09/Nov/2020:23:18:07.145712465 -0500] - DEBUG - NSMMReplicationPlugin - send_updates - agmt="cn=201" (ci-vm-10-0-139-53:39201): No more updates to send (cl5GetNextOperationToReplay) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [15661] [09/Nov/2020:23:18:07.161247006 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 0 19 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [15794] [09/Nov/2020:23:18:07.179784709 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 19 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [15924] [09/Nov/2020:23:18:07.186294432 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Result 3, 0, 0, 19, (null) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [16057] [09/Nov/2020:23:18:07.201431140 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 19 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [16190] [09/Nov/2020:23:18:07.209492223 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 19 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [16323] [09/Nov/2020:23:18:07.220136513 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 19 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 
_pattern_errorlog: [16456] [09/Nov/2020:23:18:07.229676485 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 19 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [16589] [09/Nov/2020:23:18:07.251946969 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 19 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [16702] [09/Nov/2020:23:18:07.279945953 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 19 19 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [16811] [09/Nov/2020:23:18:07.286927323 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain exiting [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [17082] [09/Nov/2020:23:18:07.291195669 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_return_buffer - session end: state=5 load=1 sent=1 skipped=0 skipped_new_rid=0 skipped_csn_gt_cons_maxcsn=0 skipped_up_to_date=0 skipped_csn_gt_ruv=0 skipped_csn_covered=0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [17245] [09/Nov/2020:23:18:07.303691594 -0500] - DEBUG - NSMMReplicationPlugin - release_replica - agmt="cn=201" (ci-vm-10-0-139-53:39201): Successfully released consumer [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [17414] [09/Nov/2020:23:18:07.310919584 -0500] - DEBUG - NSMMReplicationPlugin - conn_start_linger -agmt="cn=201" (ci-vm-10-0-139-53:39201) - Beginning linger on the connection [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [17587] [09/Nov/2020:23:18:07.318558516 -0500] - DEBUG - NSMMReplicationPlugin - 
repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: sending_updates -> wait_for_changes [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [17695] [09/Nov/2020:23:18:08.027007273 -0500] - DEBUG - replication - copy_operation_parameters - replica is null. [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [17865] [09/Nov/2020:23:18:08.031995759 -0500] - DEBUG - NSMMReplicationPlugin - agmt_set_enabled_from_entry: agreement is now disabled (agmt="cn=201" (ci-vm-10-0-139-53:39201)) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [18039] [09/Nov/2020:23:18:08.037350540 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: wait_for_changes -> wait_for_changes [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [18213] [09/Nov/2020:23:18:08.048490145 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: wait_for_changes -> wait_for_changes [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [18384] [09/Nov/2020:23:18:08.053334508 -0500] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-139-53:39201) - Canceling linger on the connection [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [18558] [09/Nov/2020:23:18:08.060942092 -0500] - DEBUG - NSMMReplicationPlugin - close_connection_internal - agmt="cn=201" (ci-vm-10-0-139-53:39201) - Disconnected from the consumer [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [18722] [09/Nov/2020:23:18:08.137545701 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_stop - agmt="cn=201" 
(ci-vm-10-0-139-53:39201): Protocol stopped after 0 seconds [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [18850] [09/Nov/2020:23:18:08.141903317 -0500] - DEBUG - NSMMReplicationPlugin - Database RUV: {replicageneration} 5faa144a000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [19035] [09/Nov/2020:23:18:08.145147577 -0500] - DEBUG - NSMMReplicationPlugin - Database RUV: {replica 1 ldap://localhost.localdomain:39001} 5faa144a000100010000 5faa147d000000010000 5faa147e [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [19209] [09/Nov/2020:23:18:08.148129093 -0500] - DEBUG - NSMMReplicationPlugin - close_connection_internal - agmt="cn=201" (ci-vm-10-0-139-53:39201) - Disconnected from the consumer [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [19317] [09/Nov/2020:23:18:08.154601664 -0500] - DEBUG - replication - copy_operation_parameters - replica is null. [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [19425] [09/Nov/2020:23:18:13.180318547 -0500] - DEBUG - replication - copy_operation_parameters - replica is null. 
[35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [19594] [09/Nov/2020:23:18:13.197492390 -0500] - DEBUG - NSMMReplicationPlugin - agmt_set_enabled_from_entry: agreement is now enabled (agmt="cn=201" (ci-vm-10-0-139-53:39201)) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [19768] [09/Nov/2020:23:18:13.207046993 -0500] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-139-53:39201) - No linger to cancel on the connection [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [19942] [09/Nov/2020:23:18:13.212094743 -0500] - DEBUG - NSMMReplicationPlugin - close_connection_internal - agmt="cn=201" (ci-vm-10-0-139-53:39201) - Disconnected from the consumer [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [20113] [09/Nov/2020:23:18:13.220158962 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: start -> ready_to_acquire_replica [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [20281] [09/Nov/2020:23:18:13.225144603 -0500] - DEBUG - NSMMReplicationPlugin - conn_connect - agmt="cn=201" (ci-vm-10-0-139-53:39201) - Trying non-secure slapi_ldap_init_ext [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [20807] [09/Nov/2020:23:18:13.232101300 -0500] - DEBUG - NSMMReplicationPlugin - conn_connect - agmt="cn=201" (ci-vm-10-0-139-53:39201) - binddn = cn=ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701,ou=services,dc=example,dc=com, passwd = 
{AES-TUhNR0NTcUdTSWIzRFFFRkRUQm1NRVVHQ1NxR1NJYjNEUUVGRERBNEJDUmtaVFV5WlRsbVl5MWtORGhrTTJJdw0KTlMxaFl6VTNNamcwT1Mxak1XVm1aVEEyTlFBQ0FRSUNBU0F3Q2dZSUtvWklodmNOQWdjd0hRWUpZSVpJQVdVRA0KQkFFcUJCQjd4NlBETjhVdlBBM09zZHphOFloVw==}3YoYoYU1jYNs6pwA+AojP+tDTZgc5hoAizypeBWkBkQpzCU9c506zfLA5y7ps+Cb [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [20852] 1k5LmqhzJjEEsOEg+fFgQ7P0u9lX4gg0wKWkeZ0qr/Y= [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [21026] [09/Nov/2020:23:18:13.239624464 -0500] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-139-53:39201) - No linger to cancel on the connection [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [21148] [09/Nov/2020:23:18:13.251099937 -0500] - DEBUG - _csngen_adjust_local_time - gen state before 5faa147f0000:1604981887:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [21269] [09/Nov/2020:23:18:13.260049815 -0500] - DEBUG - _csngen_adjust_local_time - gen state after 5faa14850000:1604981893:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [21436] [09/Nov/2020:23:18:13.273316285 -0500] - DEBUG - NSMMReplicationPlugin - acquire_replica - agmt="cn=201" (ci-vm-10-0-139-53:39201): Replica was successfully acquired. 
[35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [21617] [09/Nov/2020:23:18:13.277738264 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: ready_to_acquire_replica -> sending_updates [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [21801] [09/Nov/2020:23:18:13.291247089 -0500] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Checking consumer schema localcsn:5faa147e000000000000 / remotecsn:5faa147b000000000000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [21935] [09/Nov/2020:23:18:13.577674168 -0500] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Reread remotecsn:5faa147d000000000000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [22119] [09/Nov/2020:23:18:13.583102258 -0500] - DEBUG - NSMMReplicationPlugin - conn_push_schema - Schema checking successful: ok to push the schema (agmt="cn=201" (ci-vm-10-0-139-53:39201)) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [22234] [09/Nov/2020:23:18:13.929182518 -0500] - DEBUG - csngen_adjust_time - gen state before 5faa14850001:1604981893:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [22414] [09/Nov/2020:23:18:13.939792783 -0500] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-53:39201)): Consumer RUV: [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [22569] [09/Nov/2020:23:18:13.954467252 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replicageneration} 5faa144a000000010000 [35mDEBUG [0m 
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [22781] [09/Nov/2020:23:18:13.964789828 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replica 1 ldap://localhost.localdomain:39001} 5faa144a000100010000 5faa147d000000010000 00000000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [22961] [09/Nov/2020:23:18:13.974893201 -0500] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-53:39201)): Supplier RUV: [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [23116] [09/Nov/2020:23:18:13.982063751 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replicageneration} 5faa144a000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [23328] [09/Nov/2020:23:18:13.988859153 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replica 1 ldap://localhost.localdomain:39001} 5faa144a000100010000 5faa147d000000010000 5faa147e [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [23622] [09/Nov/2020:23:18:14.001154846 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_initial_anchorcsn - agmt="cn=201" (ci-vm-10-0-139-53:39201) - (cscb 0 - state 1) - csnPrevMax () csnMax (5faa147d000000010000) csnBuf (00000000000000000000) csnConsumerMax (5faa147d000000010000) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [23745] [09/Nov/2020:23:18:14.010872087 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_load_buffer - rc=-30988 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [24016] [09/Nov/2020:23:18:14.014774253 
-0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_return_buffer - session end: state=5 load=0 sent=0 skipped=0 skipped_new_rid=0 skipped_csn_gt_cons_maxcsn=0 skipped_up_to_date=0 skipped_csn_gt_ruv=0 skipped_csn_covered=0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [24164] [09/Nov/2020:23:18:14.020303079 -0500] - DEBUG - NSMMReplicationPlugin - send_updates - agmt="cn=201" (ci-vm-10-0-139-53:39201): No changes to send [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [24327] [09/Nov/2020:23:18:14.028197718 -0500] - DEBUG - NSMMReplicationPlugin - release_replica - agmt="cn=201" (ci-vm-10-0-139-53:39201): Successfully released consumer [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [24496] [09/Nov/2020:23:18:14.032872704 -0500] - DEBUG - NSMMReplicationPlugin - conn_start_linger -agmt="cn=201" (ci-vm-10-0-139-53:39201) - Beginning linger on the connection [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [24669] [09/Nov/2020:23:18:14.037546702 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: sending_updates -> wait_for_changes [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [24791] [09/Nov/2020:23:18:15.217990804 -0500] - DEBUG - _csngen_adjust_local_time - gen state before 5faa14850001:1604981893:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [24912] [09/Nov/2020:23:18:15.222142773 -0500] - DEBUG - _csngen_adjust_local_time - gen state after 5faa14870000:1604981895:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [25075] [09/Nov/2020:23:18:15.225566605 -0500] - DEBUG - 
NSMMReplicationPlugin - ruv_add_csn_inprogress - Successfully inserted csn 5faa1487000000010000 into pending list [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [25255] [09/Nov/2020:23:18:15.229353550 -0500] - DEBUG - NSMMReplicationPlugin - purge_entry_state_information - From entry cn=test_entry,dc=example,dc=com up to CSN 5fa0d9fd000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [25500] [09/Nov/2020:23:18:15.234956459 -0500] - DEBUG - NSMMReplicationPlugin - write_changelog_and_ruv - Writing change for cn=test_entry,dc=example,dc=com (uniqid: 9537b309-230b11eb-94b3c8a1-8c2d92e7, optype: 8) to changelog csn 5faa1487000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [25675] [09/Nov/2020:23:18:15.239255367 -0500] - DEBUG - NSMMReplicationPlugin - changelog program - cl5WriteOperationTxn - Successfully written entry with csn (5faa1487000000010000) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [25813] [09/Nov/2020:23:18:15.243946429 -0500] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: committing all csns for csn 5faa1487000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [25943] [09/Nov/2020:23:18:15.249381971 -0500] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: processing data csn 5faa1487000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [26081] [09/Nov/2020:23:18:15.252834177 -0500] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Successfully committed csn 5faa1487000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [26209] [09/Nov/2020:23:18:15.262611766 -0500] - DEBUG - NSMMReplicationPlugin - 
ruv_update_ruv - Rolled up to csn 5faa1487000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [26324] [09/Nov/2020:23:18:15.265992808 -0500] - DEBUG - replication - multimaster_mmr_postop - error 0 for operation 561. [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [26498] [09/Nov/2020:23:18:15.276652851 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: wait_for_changes -> wait_for_changes [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [26680] [09/Nov/2020:23:18:15.283697176 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: wait_for_changes -> ready_to_acquire_replica [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [26851] [09/Nov/2020:23:18:15.288297515 -0500] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-139-53:39201) - Canceling linger on the connection [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [27018] [09/Nov/2020:23:18:15.295393039 -0500] - DEBUG - NSMMReplicationPlugin - acquire_replica - agmt="cn=201" (ci-vm-10-0-139-53:39201): Replica was successfully acquired. 
[35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [27199] [09/Nov/2020:23:18:15.303762367 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: ready_to_acquire_replica -> sending_updates [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [27383] [09/Nov/2020:23:18:15.313924572 -0500] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Checking consumer schema localcsn:5faa147e000000000000 / remotecsn:5faa147d000000000000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [27517] [09/Nov/2020:23:18:15.605175545 -0500] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Reread remotecsn:5faa147e000000000000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [27632] [09/Nov/2020:23:18:15.609173991 -0500] - DEBUG - csngen_adjust_time - gen state before 5faa14870002:1604981895:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [27812] [09/Nov/2020:23:18:15.614352730 -0500] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-53:39201)): Consumer RUV: [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [27967] [09/Nov/2020:23:18:15.617361679 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replicageneration} 5faa144a000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [28179] [09/Nov/2020:23:18:15.622976709 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replica 1 ldap://localhost.localdomain:39001} 5faa144a000100010000 5faa147d000000010000 00000000 [35mDEBUG [0m 
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [28359] [09/Nov/2020:23:18:15.629667959 -0500] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-53:39201)): Supplier RUV: [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [28514] [09/Nov/2020:23:18:15.633811253 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replicageneration} 5faa144a000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [28726] [09/Nov/2020:23:18:15.639773371 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replica 1 ldap://localhost.localdomain:39001} 5faa144a000100010000 5faa1487000000010000 5faa1487 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [28887] [09/Nov/2020:23:18:15.647649895 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_get_buffer - found thread private buffer cache 0x7f29bb61d600 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [29113] [09/Nov/2020:23:18:15.653976559 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_get_buffer - _pool is 0x7f29e847abe0 _pool->pl_busy_lists is 0x7f29bb603ae0 _pool->pl_busy_lists->bl_buffers is 0x7f29bb61d600 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [29407] [09/Nov/2020:23:18:15.659342829 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_initial_anchorcsn - agmt="cn=201" (ci-vm-10-0-139-53:39201) - (cscb 0 - state 0) - csnPrevMax () csnMax (5faa1487000000010000) csnBuf (00000000000000000000) csnConsumerMax (5faa147d000000010000) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 
_pattern_errorlog: [29520] [09/Nov/2020:23:18:15.664255369 -0500] - DEBUG - clcache_initial_anchorcsn - anchor is now: 5faa147d000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [29710] [09/Nov/2020:23:18:15.668284393 -0500] - DEBUG - NSMMReplicationPlugin - changelog program - agmt="cn=201" (ci-vm-10-0-139-53:39201): CSN 5faa147d000000010000 found, position set for replay [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [29865] [09/Nov/2020:23:18:15.673095944 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_get_next_change - load=1 rec=1 csn=5faa1487000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [29977] [09/Nov/2020:23:18:15.677667406 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Starting [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [30109] [09/Nov/2020:23:18:15.688028264 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [30328] [09/Nov/2020:23:18:15.692146783 -0500] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-139-53:39201): Sending modify operation (dn="cn=test_entry,dc=example,dc=com" csn=5faa1487000000010000) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [30460] [09/Nov/2020:23:18:15.700557497 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [30657] [09/Nov/2020:23:18:15.706676913 -0500] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" 
(ci-vm-10-0-139-53:39201): Consumer successfully sent operation with csn 5faa1487000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [30970] [09/Nov/2020:23:18:15.713208583 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_adjust_anchorcsn - agmt="cn=201" (ci-vm-10-0-139-53:39201) - (cscb 0 - state 1) - csnPrevMax (5faa1487000000010000) csnMax (5faa1487000000010000) csnBuf (5faa1487000000010000) csnConsumerMax (5faa1487000000010000) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [31093] [09/Nov/2020:23:18:15.716976823 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_load_buffer - rc=-30988 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [31276] [09/Nov/2020:23:18:15.721672337 -0500] - DEBUG - NSMMReplicationPlugin - send_updates - agmt="cn=201" (ci-vm-10-0-139-53:39201): No more updates to send (cl5GetNextOperationToReplay) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [31388] [09/Nov/2020:23:18:15.725745306 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 0 14 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [31521] [09/Nov/2020:23:18:15.734804709 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 14 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [31651] [09/Nov/2020:23:18:15.740765857 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Result 3, 0, 0, 14, (null) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [31784] [09/Nov/2020:23:18:15.746109003 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - 
Read result for message_id 14 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [31917] [09/Nov/2020:23:18:15.751333476 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 14 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [32050] [09/Nov/2020:23:18:15.759509129 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 14 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [32183] [09/Nov/2020:23:18:15.768378070 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 14 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [32316] [09/Nov/2020:23:18:15.783148604 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 14 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [32449] [09/Nov/2020:23:18:15.804138111 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 14 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [32562] [09/Nov/2020:23:18:15.835025059 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 14 14 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [32671] [09/Nov/2020:23:18:15.843105757 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain exiting [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [32942] [09/Nov/2020:23:18:15.849358721 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_return_buffer - session end: state=5 load=1 sent=1 skipped=0 
skipped_new_rid=0 skipped_csn_gt_cons_maxcsn=0 skipped_up_to_date=0 skipped_csn_gt_ruv=0 skipped_csn_covered=0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [33105] [09/Nov/2020:23:18:15.857829263 -0500] - DEBUG - NSMMReplicationPlugin - release_replica - agmt="cn=201" (ci-vm-10-0-139-53:39201): Successfully released consumer [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [33274] [09/Nov/2020:23:18:15.861943085 -0500] - DEBUG - NSMMReplicationPlugin - conn_start_linger -agmt="cn=201" (ci-vm-10-0-139-53:39201) - Beginning linger on the connection [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [33447] [09/Nov/2020:23:18:15.871577810 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: sending_updates -> wait_for_changes [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [33555] [09/Nov/2020:23:18:16.740832592 -0500] - DEBUG - replication - copy_operation_parameters - replica is null. [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [33663] [09/Nov/2020:23:18:16.818980377 -0500] - DEBUG - replication - copy_operation_parameters - replica is null. 
[35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [33785] [09/Nov/2020:23:18:16.839831307 -0500] - DEBUG - _csngen_adjust_local_time - gen state before 5faa14870002:1604981895:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [33906] [09/Nov/2020:23:18:16.847073876 -0500] - DEBUG - _csngen_adjust_local_time - gen state after 5faa14880000:1604981896:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [34069] [09/Nov/2020:23:18:16.850703099 -0500] - DEBUG - NSMMReplicationPlugin - ruv_add_csn_inprogress - Successfully inserted csn 5faa1488000000010000 into pending list [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [34249] [09/Nov/2020:23:18:16.854665551 -0500] - DEBUG - NSMMReplicationPlugin - purge_entry_state_information - From entry cn=test_entry,dc=example,dc=com up to CSN 5fa0da07000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [34494] [09/Nov/2020:23:18:16.858937749 -0500] - DEBUG - NSMMReplicationPlugin - write_changelog_and_ruv - Writing change for cn=test_entry,dc=example,dc=com (uniqid: 9537b309-230b11eb-94b3c8a1-8c2d92e7, optype: 8) to changelog csn 5faa1488000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [34669] [09/Nov/2020:23:18:16.865057042 -0500] - DEBUG - NSMMReplicationPlugin - changelog program - cl5WriteOperationTxn - Successfully written entry with csn (5faa1488000000010000) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [34807] [09/Nov/2020:23:18:16.870304642 -0500] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: committing all csns for csn 5faa1488000000010000 [35mDEBUG [0m 
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [34937] [09/Nov/2020:23:18:16.874594713 -0500] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: processing data csn 5faa1488000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [35075] [09/Nov/2020:23:18:16.879331739 -0500] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Successfully committed csn 5faa1488000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [35203] [09/Nov/2020:23:18:16.888301880 -0500] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Rolled up to csn 5faa1488000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [35311] [09/Nov/2020:23:18:16.894507282 -0500] - DEBUG - replication - copy_operation_parameters - replica is null. [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [35426] [09/Nov/2020:23:18:16.900311783 -0500] - DEBUG - replication - multimaster_mmr_postop - error 0 for operation 561. 
[35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [35600] [09/Nov/2020:23:18:16.908133722 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: wait_for_changes -> wait_for_changes [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [35782] [09/Nov/2020:23:18:16.913528911 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: wait_for_changes -> ready_to_acquire_replica [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [35953] [09/Nov/2020:23:18:16.919222963 -0500] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-139-53:39201) - Canceling linger on the connection [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [36068] [09/Nov/2020:23:18:16.925557457 -0500] - DEBUG - replication - multimaster_mmr_postop - error 0 for operation 561. [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [36235] [09/Nov/2020:23:18:16.931645587 -0500] - DEBUG - NSMMReplicationPlugin - acquire_replica - agmt="cn=201" (ci-vm-10-0-139-53:39201): Replica was successfully acquired. 
[35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [36416] [09/Nov/2020:23:18:16.937238020 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: ready_to_acquire_replica -> sending_updates [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [36600] [09/Nov/2020:23:18:16.941345600 -0500] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Checking consumer schema localcsn:5faa1488000000000000 / remotecsn:5faa147e000000000000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [36734] [09/Nov/2020:23:18:17.183557907 -0500] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Reread remotecsn:5faa147e000000000000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [36918] [09/Nov/2020:23:18:17.189183441 -0500] - DEBUG - NSMMReplicationPlugin - conn_push_schema - Schema checking successful: ok to push the schema (agmt="cn=201" (ci-vm-10-0-139-53:39201)) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [37033] [09/Nov/2020:23:18:17.540363115 -0500] - DEBUG - csngen_adjust_time - gen state before 5faa14880002:1604981896:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [37155] [09/Nov/2020:23:18:17.552709763 -0500] - DEBUG - _csngen_adjust_local_time - gen state before 5faa14880002:1604981896:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [37276] [09/Nov/2020:23:18:17.556314153 -0500] - DEBUG - _csngen_adjust_local_time - gen state after 5faa14890000:1604981897:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [37456] [09/Nov/2020:23:18:17.559984699 -0500] 
- DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-53:39201)): Consumer RUV: [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [37611] [09/Nov/2020:23:18:17.564666711 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replicageneration} 5faa144a000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [37823] [09/Nov/2020:23:18:17.568194310 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replica 1 ldap://localhost.localdomain:39001} 5faa144a000100010000 5faa1487000000010000 00000000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [38003] [09/Nov/2020:23:18:17.572053096 -0500] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-53:39201)): Supplier RUV: [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [38158] [09/Nov/2020:23:18:17.575513115 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replicageneration} 5faa144a000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [38370] [09/Nov/2020:23:18:17.579875099 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replica 1 ldap://localhost.localdomain:39001} 5faa144a000100010000 5faa1488000000010000 5faa1488 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [38531] [09/Nov/2020:23:18:17.583439932 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_get_buffer - found thread private buffer cache 0x7f29bb61d600 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 
_pattern_errorlog: [38757] [09/Nov/2020:23:18:17.588981296 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_get_buffer - _pool is 0x7f29e847abe0 _pool->pl_busy_lists is 0x7f29bb603ae0 _pool->pl_busy_lists->bl_buffers is 0x7f29bb61d600 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [39051] [09/Nov/2020:23:18:17.594862041 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_initial_anchorcsn - agmt="cn=201" (ci-vm-10-0-139-53:39201) - (cscb 0 - state 0) - csnPrevMax () csnMax (5faa1488000000010000) csnBuf (5faa1487000000010000) csnConsumerMax (5faa1487000000010000) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [39164] [09/Nov/2020:23:18:17.599022123 -0500] - DEBUG - clcache_initial_anchorcsn - anchor is now: 5faa1487000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [39354] [09/Nov/2020:23:18:17.602610343 -0500] - DEBUG - NSMMReplicationPlugin - changelog program - agmt="cn=201" (ci-vm-10-0-139-53:39201): CSN 5faa1487000000010000 found, position set for replay [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [39509] [09/Nov/2020:23:18:17.606044675 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_get_next_change - load=1 rec=1 csn=5faa1488000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [39728] [09/Nov/2020:23:18:17.613928670 -0500] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-139-53:39201): Sending modify operation (dn="cn=test_entry,dc=example,dc=com" csn=5faa1488000000010000) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [39925] [09/Nov/2020:23:18:17.619337454 -0500] - DEBUG - NSMMReplicationPlugin - 
replay_update - agmt="cn=201" (ci-vm-10-0-139-53:39201): Consumer successfully sent operation with csn 5faa1488000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [40238] [09/Nov/2020:23:18:17.623291352 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_adjust_anchorcsn - agmt="cn=201" (ci-vm-10-0-139-53:39201) - (cscb 0 - state 1) - csnPrevMax (5faa1488000000010000) csnMax (5faa1488000000010000) csnBuf (5faa1488000000010000) csnConsumerMax (5faa1488000000010000) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [40361] [09/Nov/2020:23:18:17.627072561 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_load_buffer - rc=-30988 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [40544] [09/Nov/2020:23:18:17.630396993 -0500] - DEBUG - NSMMReplicationPlugin - send_updates - agmt="cn=201" (ci-vm-10-0-139-53:39201): No more updates to send (cl5GetNextOperationToReplay) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [40656] [09/Nov/2020:23:18:17.633707582 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 0 21 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [40768] [09/Nov/2020:23:18:17.636870148 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Starting [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [40901] [09/Nov/2020:23:18:17.641637266 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 21 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [41031] [09/Nov/2020:23:18:17.644888594 -0500] - DEBUG - NSMMReplicationPlugin - 
repl5_inc_result_threadmain - Result 3, 0, 0, 21, (null) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [41164] [09/Nov/2020:23:18:17.649758843 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 21 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [41297] [09/Nov/2020:23:18:17.654431686 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 21 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [41430] [09/Nov/2020:23:18:17.661149196 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 21 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [41563] [09/Nov/2020:23:18:17.669524234 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 21 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [41696] [09/Nov/2020:23:18:17.681571661 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 21 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [41829] [09/Nov/2020:23:18:17.702691468 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 21 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [41942] [09/Nov/2020:23:18:17.737008472 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 21 21 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [42051] [09/Nov/2020:23:18:17.741169935 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain exiting 
[35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [42322] [09/Nov/2020:23:18:17.745462774 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_return_buffer - session end: state=5 load=1 sent=1 skipped=0 skipped_new_rid=0 skipped_csn_gt_cons_maxcsn=0 skipped_up_to_date=0 skipped_csn_gt_ruv=0 skipped_csn_covered=0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [42485] [09/Nov/2020:23:18:17.753576094 -0500] - DEBUG - NSMMReplicationPlugin - release_replica - agmt="cn=201" (ci-vm-10-0-139-53:39201): Successfully released consumer [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [42654] [09/Nov/2020:23:18:17.759139780 -0500] - DEBUG - NSMMReplicationPlugin - conn_start_linger -agmt="cn=201" (ci-vm-10-0-139-53:39201) - Beginning linger on the connection [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [42827] [09/Nov/2020:23:18:17.762543588 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: sending_updates -> wait_for_changes [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [42935] [09/Nov/2020:23:18:17.915088056 -0500] - DEBUG - replication - copy_operation_parameters - replica is null. 
[35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [43105] [09/Nov/2020:23:18:17.923527546 -0500] - DEBUG - NSMMReplicationPlugin - agmt_set_enabled_from_entry: agreement is now disabled (agmt="cn=201" (ci-vm-10-0-139-53:39201)) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [43279] [09/Nov/2020:23:18:17.932788646 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: wait_for_changes -> wait_for_changes [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [43453] [09/Nov/2020:23:18:17.936997817 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: wait_for_changes -> wait_for_changes [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [43624] [09/Nov/2020:23:18:17.941744489 -0500] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-139-53:39201) - Canceling linger on the connection [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [43798] [09/Nov/2020:23:18:17.948166739 -0500] - DEBUG - NSMMReplicationPlugin - close_connection_internal - agmt="cn=201" (ci-vm-10-0-139-53:39201) - Disconnected from the consumer [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [43962] [09/Nov/2020:23:18:18.032987962 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_stop - agmt="cn=201" (ci-vm-10-0-139-53:39201): Protocol stopped after 0 seconds [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [44090] [09/Nov/2020:23:18:18.037379580 -0500] - DEBUG - NSMMReplicationPlugin - Database RUV: {replicageneration} 5faa144a000000010000 [35mDEBUG [0m 
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [44275] [09/Nov/2020:23:18:18.041971035 -0500] - DEBUG - NSMMReplicationPlugin - Database RUV: {replica 1 ldap://localhost.localdomain:39001} 5faa144a000100010000 5faa1488000000010000 5faa1488 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [44449] [09/Nov/2020:23:18:18.045694247 -0500] - DEBUG - NSMMReplicationPlugin - close_connection_internal - agmt="cn=201" (ci-vm-10-0-139-53:39201) - Disconnected from the consumer [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [44557] [09/Nov/2020:23:18:18.050630102 -0500] - DEBUG - replication - copy_operation_parameters - replica is null. [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [44665] [09/Nov/2020:23:18:23.071777590 -0500] - DEBUG - replication - copy_operation_parameters - replica is null. 
[35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [44834] [09/Nov/2020:23:18:23.077252816 -0500] - DEBUG - NSMMReplicationPlugin - agmt_set_enabled_from_entry: agreement is now enabled (agmt="cn=201" (ci-vm-10-0-139-53:39201)) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [45008] [09/Nov/2020:23:18:23.082981022 -0500] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-139-53:39201) - No linger to cancel on the connection [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [45182] [09/Nov/2020:23:18:23.086646164 -0500] - DEBUG - NSMMReplicationPlugin - close_connection_internal - agmt="cn=201" (ci-vm-10-0-139-53:39201) - Disconnected from the consumer [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [45353] [09/Nov/2020:23:18:23.090814864 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: start -> ready_to_acquire_replica [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [45521] [09/Nov/2020:23:18:23.096430904 -0500] - DEBUG - NSMMReplicationPlugin - conn_connect - agmt="cn=201" (ci-vm-10-0-139-53:39201) - Trying non-secure slapi_ldap_init_ext [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [46047] [09/Nov/2020:23:18:23.100174317 -0500] - DEBUG - NSMMReplicationPlugin - conn_connect - agmt="cn=201" (ci-vm-10-0-139-53:39201) - binddn = cn=ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701,ou=services,dc=example,dc=com, passwd = 
{AES-TUhNR0NTcUdTSWIzRFFFRkRUQm1NRVVHQ1NxR1NJYjNEUUVGRERBNEJDUmtaVFV5WlRsbVl5MWtORGhrTTJJdw0KTlMxaFl6VTNNamcwT1Mxak1XVm1aVEEyTlFBQ0FRSUNBU0F3Q2dZSUtvWklodmNOQWdjd0hRWUpZSVpJQVdVRA0KQkFFcUJCQjd4NlBETjhVdlBBM09zZHphOFloVw==}3YoYoYU1jYNs6pwA+AojP+tDTZgc5hoAizypeBWkBkQpzCU9c506zfLA5y7ps+Cb [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [46092] 1k5LmqhzJjEEsOEg+fFgQ7P0u9lX4gg0wKWkeZ0qr/Y= [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [46266] [09/Nov/2020:23:18:23.105468188 -0500] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-139-53:39201) - No linger to cancel on the connection [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [46388] [09/Nov/2020:23:18:23.111733811 -0500] - DEBUG - _csngen_adjust_local_time - gen state before 5faa14890000:1604981897:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [46509] [09/Nov/2020:23:18:23.116649956 -0500] - DEBUG - _csngen_adjust_local_time - gen state after 5faa148f0000:1604981903:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [46676] [09/Nov/2020:23:18:23.125567709 -0500] - DEBUG - NSMMReplicationPlugin - acquire_replica - agmt="cn=201" (ci-vm-10-0-139-53:39201): Replica was successfully acquired. 
[35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [46857] [09/Nov/2020:23:18:23.129450455 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: ready_to_acquire_replica -> sending_updates [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [47041] [09/Nov/2020:23:18:23.133463686 -0500] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Checking consumer schema localcsn:5faa1488000000000000 / remotecsn:5faa147e000000000000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [47175] [09/Nov/2020:23:18:23.421361817 -0500] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Reread remotecsn:5faa1488000000000000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [47290] [09/Nov/2020:23:18:23.436068019 -0500] - DEBUG - csngen_adjust_time - gen state before 5faa148f0001:1604981903:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [47470] [09/Nov/2020:23:18:23.441733158 -0500] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-53:39201)): Consumer RUV: [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [47625] [09/Nov/2020:23:18:23.448252839 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replicageneration} 5faa144a000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [47837] [09/Nov/2020:23:18:23.451777121 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replica 1 ldap://localhost.localdomain:39001} 5faa144a000100010000 5faa1488000000010000 00000000 [35mDEBUG [0m 
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [48017] [09/Nov/2020:23:18:23.455484445 -0500] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-53:39201)): Supplier RUV: [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [48172] [09/Nov/2020:23:18:23.459355481 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replicageneration} 5faa144a000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [48384] [09/Nov/2020:23:18:23.463411948 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replica 1 ldap://localhost.localdomain:39001} 5faa144a000100010000 5faa1488000000010000 5faa1488 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [48678] [09/Nov/2020:23:18:23.467506988 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_initial_anchorcsn - agmt="cn=201" (ci-vm-10-0-139-53:39201) - (cscb 0 - state 1) - csnPrevMax () csnMax (5faa1488000000010000) csnBuf (00000000000000000000) csnConsumerMax (5faa1488000000010000) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [48801] [09/Nov/2020:23:18:23.482830751 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_load_buffer - rc=-30988 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [49072] [09/Nov/2020:23:18:23.489727710 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_return_buffer - session end: state=5 load=0 sent=0 skipped=0 skipped_new_rid=0 skipped_csn_gt_cons_maxcsn=0 skipped_up_to_date=0 skipped_csn_gt_ruv=0 skipped_csn_covered=0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 
_pattern_errorlog: [49220] [09/Nov/2020:23:18:23.494466795 -0500] - DEBUG - NSMMReplicationPlugin - send_updates - agmt="cn=201" (ci-vm-10-0-139-53:39201): No changes to send [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [49383] [09/Nov/2020:23:18:23.503166359 -0500] - DEBUG - NSMMReplicationPlugin - release_replica - agmt="cn=201" (ci-vm-10-0-139-53:39201): Successfully released consumer [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [49552] [09/Nov/2020:23:18:23.507568208 -0500] - DEBUG - NSMMReplicationPlugin - conn_start_linger -agmt="cn=201" (ci-vm-10-0-139-53:39201) - Beginning linger on the connection [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [49725] [09/Nov/2020:23:18:23.511076823 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: sending_updates -> wait_for_changes [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [49847] [09/Nov/2020:23:18:25.096100090 -0500] - DEBUG - _csngen_adjust_local_time - gen state before 5faa148f0001:1604981903:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [49968] [09/Nov/2020:23:18:25.127024158 -0500] - DEBUG - _csngen_adjust_local_time - gen state after 5faa14910000:1604981905:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [50131] [09/Nov/2020:23:18:25.134147003 -0500] - DEBUG - NSMMReplicationPlugin - ruv_add_csn_inprogress - Successfully inserted csn 5faa1491000000010000 into pending list [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [50311] [09/Nov/2020:23:18:25.147742667 -0500] - DEBUG - NSMMReplicationPlugin - purge_entry_state_information - 
From entry cn=test_entry,dc=example,dc=com up to CSN 5fa0da08000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [50556] [09/Nov/2020:23:18:25.152878404 -0500] - DEBUG - NSMMReplicationPlugin - write_changelog_and_ruv - Writing change for cn=test_entry,dc=example,dc=com (uniqid: 9537b309-230b11eb-94b3c8a1-8c2d92e7, optype: 8) to changelog csn 5faa1491000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [50731] [09/Nov/2020:23:18:25.164312350 -0500] - DEBUG - NSMMReplicationPlugin - changelog program - cl5WriteOperationTxn - Successfully written entry with csn (5faa1491000000010000) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [50869] [09/Nov/2020:23:18:25.176685719 -0500] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: committing all csns for csn 5faa1491000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [50999] [09/Nov/2020:23:18:25.183079497 -0500] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: processing data csn 5faa1491000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [51137] [09/Nov/2020:23:18:25.189402476 -0500] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Successfully committed csn 5faa1491000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [51265] [09/Nov/2020:23:18:25.202942729 -0500] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Rolled up to csn 5faa1491000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [51380] [09/Nov/2020:23:18:25.212478693 -0500] - DEBUG - replication - multimaster_mmr_postop - error 0 for operation 561. 
[35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [51554] [09/Nov/2020:23:18:25.232927843 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: wait_for_changes -> wait_for_changes [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [51736] [09/Nov/2020:23:18:25.237232128 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: wait_for_changes -> ready_to_acquire_replica [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [51907] [09/Nov/2020:23:18:25.242734058 -0500] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-139-53:39201) - Canceling linger on the connection [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [52074] [09/Nov/2020:23:18:25.250235516 -0500] - DEBUG - NSMMReplicationPlugin - acquire_replica - agmt="cn=201" (ci-vm-10-0-139-53:39201): Replica was successfully acquired. 
[35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [52255] [09/Nov/2020:23:18:25.259592287 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: ready_to_acquire_replica -> sending_updates [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [52370] [09/Nov/2020:23:18:25.265153181 -0500] - DEBUG - csngen_adjust_time - gen state before 5faa14910002:1604981905:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [52550] [09/Nov/2020:23:18:25.268928722 -0500] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-53:39201)): Consumer RUV: [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [52705] [09/Nov/2020:23:18:25.283524084 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replicageneration} 5faa144a000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [52917] [09/Nov/2020:23:18:25.288276324 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replica 1 ldap://localhost.localdomain:39001} 5faa144a000100010000 5faa1488000000010000 00000000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [53097] [09/Nov/2020:23:18:25.291658210 -0500] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-53:39201)): Supplier RUV: [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [53252] [09/Nov/2020:23:18:25.295890546 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replicageneration} 5faa144a000000010000 [35mDEBUG [0m 
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [53464] [09/Nov/2020:23:18:25.301359902 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replica 1 ldap://localhost.localdomain:39001} 5faa144a000100010000 5faa1491000000010000 5faa1491 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [53625] [09/Nov/2020:23:18:25.314300681 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_get_buffer - found thread private buffer cache 0x7f29bb578600 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [53851] [09/Nov/2020:23:18:25.318326268 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_get_buffer - _pool is 0x7f29e847abe0 _pool->pl_busy_lists is 0x7f29bb603ae0 _pool->pl_busy_lists->bl_buffers is 0x7f29bb578600 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [54145] [09/Nov/2020:23:18:25.321951236 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_initial_anchorcsn - agmt="cn=201" (ci-vm-10-0-139-53:39201) - (cscb 0 - state 0) - csnPrevMax () csnMax (5faa1491000000010000) csnBuf (00000000000000000000) csnConsumerMax (5faa1488000000010000) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [54258] [09/Nov/2020:23:18:25.327707213 -0500] - DEBUG - clcache_initial_anchorcsn - anchor is now: 5faa1488000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [54448] [09/Nov/2020:23:18:25.331734663 -0500] - DEBUG - NSMMReplicationPlugin - changelog program - agmt="cn=201" (ci-vm-10-0-139-53:39201): CSN 5faa1488000000010000 found, position set for replay [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [54603] 
[09/Nov/2020:23:18:25.339752946 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_get_next_change - load=1 rec=1 csn=5faa1491000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [54715] [09/Nov/2020:23:18:25.344198253 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Starting [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [54847] [09/Nov/2020:23:18:25.349546499 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [55066] [09/Nov/2020:23:18:25.353675620 -0500] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-139-53:39201): Sending modify operation (dn="cn=test_entry,dc=example,dc=com" csn=5faa1491000000010000) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [55198] [09/Nov/2020:23:18:25.357685217 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [55395] [09/Nov/2020:23:18:25.362643226 -0500] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-139-53:39201): Consumer successfully sent operation with csn 5faa1491000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [55708] [09/Nov/2020:23:18:25.366192925 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_adjust_anchorcsn - agmt="cn=201" (ci-vm-10-0-139-53:39201) - (cscb 0 - state 1) - csnPrevMax (5faa1491000000010000) csnMax (5faa1491000000010000) csnBuf (5faa1491000000010000) csnConsumerMax (5faa1491000000010000) [35mDEBUG [0m 
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [55831] [09/Nov/2020:23:18:25.373698379 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_load_buffer - rc=-30988 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [56014] [09/Nov/2020:23:18:25.384744477 -0500] - DEBUG - NSMMReplicationPlugin - send_updates - agmt="cn=201" (ci-vm-10-0-139-53:39201): No more updates to send (cl5GetNextOperationToReplay) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [56126] [09/Nov/2020:23:18:25.388139188 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 0 11 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [56259] [09/Nov/2020:23:18:25.392751789 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 11 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [56389] [09/Nov/2020:23:18:25.403703327 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Result 3, 0, 0, 11, (null) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [56522] [09/Nov/2020:23:18:25.408247054 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 11 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [56655] [09/Nov/2020:23:18:25.412720369 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 11 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [56788] [09/Nov/2020:23:18:25.422545708 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 11 [35mDEBUG [0m 
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [56921] [09/Nov/2020:23:18:25.432652159 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 11 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [57054] [09/Nov/2020:23:18:25.446997756 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 11 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [57187] [09/Nov/2020:23:18:25.467980718 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 11 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [57300] [09/Nov/2020:23:18:25.492928598 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 11 11 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [57409] [09/Nov/2020:23:18:25.505833267 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain exiting [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [57680] [09/Nov/2020:23:18:25.513923366 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_return_buffer - session end: state=5 load=1 sent=1 skipped=0 skipped_new_rid=0 skipped_csn_gt_cons_maxcsn=0 skipped_up_to_date=0 skipped_csn_gt_ruv=0 skipped_csn_covered=0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [57843] [09/Nov/2020:23:18:25.521914970 -0500] - DEBUG - NSMMReplicationPlugin - release_replica - agmt="cn=201" (ci-vm-10-0-139-53:39201): Successfully released consumer [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [58012] [09/Nov/2020:23:18:25.530415470 -0500] 
- DEBUG - NSMMReplicationPlugin - conn_start_linger -agmt="cn=201" (ci-vm-10-0-139-53:39201) - Beginning linger on the connection [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [58185] [09/Nov/2020:23:18:25.534380243 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: sending_updates -> wait_for_changes [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [58185] [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:67 _pattern_errorlog: end at offset 58185 | |||
Passed | suites/schema/schema_replication_test.py::test_schema_replication_seven | 9.85 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:schema_replication_test.py:41 ############################################### [32mINFO [0m lib389:schema_replication_test.py:42 ####### [32mINFO [0m lib389:schema_replication_test.py:43 ####### Same OC - extra MAY: Schema is pushed - no error [32mINFO [0m lib389:schema_replication_test.py:44 ####### [32mINFO [0m lib389:schema_replication_test.py:45 ################################################### [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive b'12' (expected 13) [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive b'13' (expected 14) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:562 test_schema_replication_seven master_schema_csn=b'5faa1492000000000000' [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:563 ctest_schema_replication_seven consumer_schema_csn=b'5faa1492000000000000' [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:55 _pattern_errorlog: start at offset 58186 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [58293] 09/Nov/2020:23:18:26.709016205 -0500] - DEBUG - replication - copy_operation_parameters - replica is null. [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [58401] [09/Nov/2020:23:18:26.803729529 -0500] - DEBUG - replication - copy_operation_parameters - replica is null. 
[35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [58523] [09/Nov/2020:23:18:26.839351320 -0500] - DEBUG - _csngen_adjust_local_time - gen state before 5faa14910002:1604981905:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [58644] [09/Nov/2020:23:18:26.856716437 -0500] - DEBUG - _csngen_adjust_local_time - gen state after 5faa14920000:1604981906:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [58807] [09/Nov/2020:23:18:26.873897691 -0500] - DEBUG - NSMMReplicationPlugin - ruv_add_csn_inprogress - Successfully inserted csn 5faa1492000000010000 into pending list [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [58987] [09/Nov/2020:23:18:26.888630833 -0500] - DEBUG - NSMMReplicationPlugin - purge_entry_state_information - From entry cn=test_entry,dc=example,dc=com up to CSN 5fa0da11000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [59232] [09/Nov/2020:23:18:26.895730004 -0500] - DEBUG - NSMMReplicationPlugin - write_changelog_and_ruv - Writing change for cn=test_entry,dc=example,dc=com (uniqid: 9537b309-230b11eb-94b3c8a1-8c2d92e7, optype: 8) to changelog csn 5faa1492000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [59407] [09/Nov/2020:23:18:26.904558280 -0500] - DEBUG - NSMMReplicationPlugin - changelog program - cl5WriteOperationTxn - Successfully written entry with csn (5faa1492000000010000) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [59545] [09/Nov/2020:23:18:26.912697081 -0500] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: committing all csns for csn 5faa1492000000010000 [35mDEBUG [0m 
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [59675] [09/Nov/2020:23:18:26.925570660 -0500] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: processing data csn 5faa1492000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [59813] [09/Nov/2020:23:18:26.929344760 -0500] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Successfully committed csn 5faa1492000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [59941] [09/Nov/2020:23:18:26.935084804 -0500] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Rolled up to csn 5faa1492000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [60056] [09/Nov/2020:23:18:26.943349459 -0500] - DEBUG - replication - multimaster_mmr_postop - error 0 for operation 561. [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [60230] [09/Nov/2020:23:18:26.954333333 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: wait_for_changes -> wait_for_changes [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [60412] [09/Nov/2020:23:18:26.962302278 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: wait_for_changes -> ready_to_acquire_replica [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [60583] [09/Nov/2020:23:18:26.967013288 -0500] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-139-53:39201) - Canceling linger on the connection [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [60750] [09/Nov/2020:23:18:26.975095290 -0500] - DEBUG - 
NSMMReplicationPlugin - acquire_replica - agmt="cn=201" (ci-vm-10-0-139-53:39201): Replica was successfully acquired. [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [60931] [09/Nov/2020:23:18:26.982421556 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: ready_to_acquire_replica -> sending_updates [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [61115] [09/Nov/2020:23:18:26.986944972 -0500] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Checking consumer schema localcsn:5faa1492000000000000 / remotecsn:5faa1488000000000000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [61278] [09/Nov/2020:23:18:27.279939156 -0500] - DEBUG - schema_oc_compare_strict - Attribute postOfficeBox is not allowed in 'masterNewOCA' of the remote consumer schema [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [61412] [09/Nov/2020:23:18:27.455539472 -0500] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Reread remotecsn:5faa1488000000000000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [61596] [09/Nov/2020:23:18:27.460901982 -0500] - DEBUG - NSMMReplicationPlugin - conn_push_schema - Schema checking successful: ok to push the schema (agmt="cn=201" (ci-vm-10-0-139-53:39201)) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [61711] [09/Nov/2020:23:18:27.804108784 -0500] - DEBUG - csngen_adjust_time - gen state before 5faa14920002:1604981906:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [61833] [09/Nov/2020:23:18:27.811397155 -0500] - DEBUG - _csngen_adjust_local_time - gen state before 
5faa14920002:1604981906:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [61954] [09/Nov/2020:23:18:27.815016563 -0500] - DEBUG - _csngen_adjust_local_time - gen state after 5faa14930000:1604981907:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [62134] [09/Nov/2020:23:18:27.818107980 -0500] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-53:39201)): Consumer RUV: [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [62289] [09/Nov/2020:23:18:27.821909267 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replicageneration} 5faa144a000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [62501] [09/Nov/2020:23:18:27.825273968 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replica 1 ldap://localhost.localdomain:39001} 5faa144a000100010000 5faa1491000000010000 00000000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [62681] [09/Nov/2020:23:18:27.829180653 -0500] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-53:39201)): Supplier RUV: [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [62836] [09/Nov/2020:23:18:27.837797427 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replicageneration} 5faa144a000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [63048] [09/Nov/2020:23:18:27.842668712 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replica 1 ldap://localhost.localdomain:39001} 
5faa144a000100010000 5faa1492000000010000 5faa1492 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [63209] [09/Nov/2020:23:18:27.846659132 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_get_buffer - found thread private buffer cache 0x7f29bb578600 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [63435] [09/Nov/2020:23:18:27.853093113 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_get_buffer - _pool is 0x7f29e847abe0 _pool->pl_busy_lists is 0x7f29bb603ae0 _pool->pl_busy_lists->bl_buffers is 0x7f29bb578600 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [63729] [09/Nov/2020:23:18:27.862470260 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_initial_anchorcsn - agmt="cn=201" (ci-vm-10-0-139-53:39201) - (cscb 0 - state 0) - csnPrevMax () csnMax (5faa1492000000010000) csnBuf (5faa1491000000010000) csnConsumerMax (5faa1491000000010000) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [63842] [09/Nov/2020:23:18:27.866478593 -0500] - DEBUG - clcache_initial_anchorcsn - anchor is now: 5faa1491000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [64032] [09/Nov/2020:23:18:27.870750901 -0500] - DEBUG - NSMMReplicationPlugin - changelog program - agmt="cn=201" (ci-vm-10-0-139-53:39201): CSN 5faa1491000000010000 found, position set for replay [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [64187] [09/Nov/2020:23:18:27.878499321 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_get_next_change - load=1 rec=1 csn=5faa1492000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [64406] 
[09/Nov/2020:23:18:27.891463075 -0500] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-139-53:39201): Sending modify operation (dn="cn=test_entry,dc=example,dc=com" csn=5faa1492000000010000) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [64518] [09/Nov/2020:23:18:27.896268270 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Starting [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [64651] [09/Nov/2020:23:18:27.905810290 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 18 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [64781] [09/Nov/2020:23:18:27.911231121 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Result 3, 0, 0, 18, (null) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [64914] [09/Nov/2020:23:18:27.916005319 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 18 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [65047] [09/Nov/2020:23:18:27.921301691 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 18 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [65244] [09/Nov/2020:23:18:27.939484957 -0500] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-139-53:39201): Consumer successfully sent operation with csn 5faa1492000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [65377] [09/Nov/2020:23:18:27.950038815 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 18 
[35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [65690] [09/Nov/2020:23:18:27.953642818 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_adjust_anchorcsn - agmt="cn=201" (ci-vm-10-0-139-53:39201) - (cscb 0 - state 1) - csnPrevMax (5faa1492000000010000) csnMax (5faa1492000000010000) csnBuf (5faa1492000000010000) csnConsumerMax (5faa1492000000010000) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [65813] [09/Nov/2020:23:18:27.957799654 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_load_buffer - rc=-30988 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [65996] [09/Nov/2020:23:18:27.963398481 -0500] - DEBUG - NSMMReplicationPlugin - send_updates - agmt="cn=201" (ci-vm-10-0-139-53:39201): No more updates to send (cl5GetNextOperationToReplay) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [66104] [09/Nov/2020:23:18:27.967482239 -0500] - DEBUG - replication - copy_operation_parameters - replica is null. 
[35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [66237] [09/Nov/2020:23:18:27.972014139 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 18 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [66350] [09/Nov/2020:23:18:27.990599129 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 18 18 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [66520] [09/Nov/2020:23:18:28.002018170 -0500] - DEBUG - NSMMReplicationPlugin - agmt_set_enabled_from_entry: agreement is now disabled (agmt="cn=201" (ci-vm-10-0-139-53:39201)) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [66629] [09/Nov/2020:23:18:28.010420629 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain exiting [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [66900] [09/Nov/2020:23:18:28.016976471 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_return_buffer - session end: state=5 load=1 sent=1 skipped=0 skipped_new_rid=0 skipped_csn_gt_cons_maxcsn=0 skipped_up_to_date=0 skipped_csn_gt_ruv=0 skipped_csn_covered=0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [67063] [09/Nov/2020:23:18:28.025927235 -0500] - DEBUG - NSMMReplicationPlugin - release_replica - agmt="cn=201" (ci-vm-10-0-139-53:39201): Successfully released consumer [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [67232] [09/Nov/2020:23:18:28.031294585 -0500] - DEBUG - NSMMReplicationPlugin - conn_start_linger -agmt="cn=201" (ci-vm-10-0-139-53:39201) - Beginning linger on the connection [35mDEBUG [0m 
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [67405] [09/Nov/2020:23:18:28.042417748 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: sending_updates -> wait_for_changes [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [67579] [09/Nov/2020:23:18:28.047031320 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: wait_for_changes -> wait_for_changes [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [67750] [09/Nov/2020:23:18:28.052035192 -0500] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-139-53:39201) - Canceling linger on the connection [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [67924] [09/Nov/2020:23:18:28.055788130 -0500] - DEBUG - NSMMReplicationPlugin - close_connection_internal - agmt="cn=201" (ci-vm-10-0-139-53:39201) - Disconnected from the consumer [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [68088] [09/Nov/2020:23:18:28.110610061 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_stop - agmt="cn=201" (ci-vm-10-0-139-53:39201): Protocol stopped after 0 seconds [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [68216] [09/Nov/2020:23:18:28.116645150 -0500] - DEBUG - NSMMReplicationPlugin - Database RUV: {replicageneration} 5faa144a000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [68401] [09/Nov/2020:23:18:28.120574219 -0500] - DEBUG - NSMMReplicationPlugin - Database RUV: {replica 1 ldap://localhost.localdomain:39001} 5faa144a000100010000 5faa1492000000010000 5faa1492 [35mDEBUG [0m 
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [68575] [09/Nov/2020:23:18:28.127489967 -0500] - DEBUG - NSMMReplicationPlugin - close_connection_internal - agmt="cn=201" (ci-vm-10-0-139-53:39201) - Disconnected from the consumer [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [68683] [09/Nov/2020:23:18:28.131841484 -0500] - DEBUG - replication - copy_operation_parameters - replica is null. [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [68791] [09/Nov/2020:23:18:33.162022781 -0500] - DEBUG - replication - copy_operation_parameters - replica is null. [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [68960] [09/Nov/2020:23:18:33.167426800 -0500] - DEBUG - NSMMReplicationPlugin - agmt_set_enabled_from_entry: agreement is now enabled (agmt="cn=201" (ci-vm-10-0-139-53:39201)) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [69134] [09/Nov/2020:23:18:33.174853460 -0500] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-139-53:39201) - No linger to cancel on the connection [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [69308] [09/Nov/2020:23:18:33.182657618 -0500] - DEBUG - NSMMReplicationPlugin - close_connection_internal - agmt="cn=201" (ci-vm-10-0-139-53:39201) - Disconnected from the consumer [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [69479] [09/Nov/2020:23:18:33.187408291 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: start -> ready_to_acquire_replica [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [69647] 
[09/Nov/2020:23:18:33.198510308 -0500] - DEBUG - NSMMReplicationPlugin - conn_connect - agmt="cn=201" (ci-vm-10-0-139-53:39201) - Trying non-secure slapi_ldap_init_ext [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [70173] [09/Nov/2020:23:18:33.202598501 -0500] - DEBUG - NSMMReplicationPlugin - conn_connect - agmt="cn=201" (ci-vm-10-0-139-53:39201) - binddn = cn=ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701,ou=services,dc=example,dc=com, passwd = {AES-TUhNR0NTcUdTSWIzRFFFRkRUQm1NRVVHQ1NxR1NJYjNEUUVGRERBNEJDUmtaVFV5WlRsbVl5MWtORGhrTTJJdw0KTlMxaFl6VTNNamcwT1Mxak1XVm1aVEEyTlFBQ0FRSUNBU0F3Q2dZSUtvWklodmNOQWdjd0hRWUpZSVpJQVdVRA0KQkFFcUJCQjd4NlBETjhVdlBBM09zZHphOFloVw==}3YoYoYU1jYNs6pwA+AojP+tDTZgc5hoAizypeBWkBkQpzCU9c506zfLA5y7ps+Cb [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [70218] 1k5LmqhzJjEEsOEg+fFgQ7P0u9lX4gg0wKWkeZ0qr/Y= [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [70392] [09/Nov/2020:23:18:33.212167122 -0500] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-139-53:39201) - No linger to cancel on the connection [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [70514] [09/Nov/2020:23:18:33.223225003 -0500] - DEBUG - _csngen_adjust_local_time - gen state before 5faa14930000:1604981907:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [70635] [09/Nov/2020:23:18:33.228657444 -0500] - DEBUG - _csngen_adjust_local_time - gen state after 5faa14990000:1604981913:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [70802] [09/Nov/2020:23:18:33.235859998 -0500] - DEBUG - NSMMReplicationPlugin - acquire_replica - agmt="cn=201" (ci-vm-10-0-139-53:39201): Replica was 
successfully acquired. [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [70983] [09/Nov/2020:23:18:33.242573941 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: ready_to_acquire_replica -> sending_updates [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [71167] [09/Nov/2020:23:18:33.249743546 -0500] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Checking consumer schema localcsn:5faa1492000000000000 / remotecsn:5faa1488000000000000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [71301] [09/Nov/2020:23:18:33.504978743 -0500] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Reread remotecsn:5faa1492000000000000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [71416] [09/Nov/2020:23:18:33.514868745 -0500] - DEBUG - csngen_adjust_time - gen state before 5faa14990001:1604981913:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [71596] [09/Nov/2020:23:18:33.519867646 -0500] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-53:39201)): Consumer RUV: [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [71751] [09/Nov/2020:23:18:33.523541003 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replicageneration} 5faa144a000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [71963] [09/Nov/2020:23:18:33.526643139 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replica 1 ldap://localhost.localdomain:39001} 5faa144a000100010000 5faa1492000000010000 00000000 [35mDEBUG 
[0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [72143] [09/Nov/2020:23:18:33.532665956 -0500] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-53:39201)): Supplier RUV: [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [72298] [09/Nov/2020:23:18:33.541537392 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replicageneration} 5faa144a000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [72510] [09/Nov/2020:23:18:33.545779461 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replica 1 ldap://localhost.localdomain:39001} 5faa144a000100010000 5faa1492000000010000 5faa1492 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [72804] [09/Nov/2020:23:18:33.550270179 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_initial_anchorcsn - agmt="cn=201" (ci-vm-10-0-139-53:39201) - (cscb 0 - state 1) - csnPrevMax () csnMax (5faa1492000000010000) csnBuf (00000000000000000000) csnConsumerMax (5faa1492000000010000) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [72927] [09/Nov/2020:23:18:33.558206713 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_load_buffer - rc=-30988 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [73198] [09/Nov/2020:23:18:33.566304409 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_return_buffer - session end: state=5 load=0 sent=0 skipped=0 skipped_new_rid=0 skipped_csn_gt_cons_maxcsn=0 skipped_up_to_date=0 skipped_csn_gt_ruv=0 skipped_csn_covered=0 [35mDEBUG [0m 
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [73346] [09/Nov/2020:23:18:33.571003362 -0500] - DEBUG - NSMMReplicationPlugin - send_updates - agmt="cn=201" (ci-vm-10-0-139-53:39201): No changes to send [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [73509] [09/Nov/2020:23:18:33.579276744 -0500] - DEBUG - NSMMReplicationPlugin - release_replica - agmt="cn=201" (ci-vm-10-0-139-53:39201): Successfully released consumer [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [73678] [09/Nov/2020:23:18:33.583208306 -0500] - DEBUG - NSMMReplicationPlugin - conn_start_linger -agmt="cn=201" (ci-vm-10-0-139-53:39201) - Beginning linger on the connection [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [73851] [09/Nov/2020:23:18:33.588087436 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: sending_updates -> wait_for_changes [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [73973] [09/Nov/2020:23:18:35.187244969 -0500] - DEBUG - _csngen_adjust_local_time - gen state before 5faa14990001:1604981913:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [74094] [09/Nov/2020:23:18:35.192480842 -0500] - DEBUG - _csngen_adjust_local_time - gen state after 5faa149b0000:1604981915:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [74257] [09/Nov/2020:23:18:35.199542874 -0500] - DEBUG - NSMMReplicationPlugin - ruv_add_csn_inprogress - Successfully inserted csn 5faa149b000000010000 into pending list [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [74437] [09/Nov/2020:23:18:35.209057557 
-0500] - DEBUG - NSMMReplicationPlugin - purge_entry_state_information - From entry cn=test_entry,dc=example,dc=com up to CSN 5fa0da12000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [74682] [09/Nov/2020:23:18:35.214177863 -0500] - DEBUG - NSMMReplicationPlugin - write_changelog_and_ruv - Writing change for cn=test_entry,dc=example,dc=com (uniqid: 9537b309-230b11eb-94b3c8a1-8c2d92e7, optype: 8) to changelog csn 5faa149b000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [74857] [09/Nov/2020:23:18:35.218315912 -0500] - DEBUG - NSMMReplicationPlugin - changelog program - cl5WriteOperationTxn - Successfully written entry with csn (5faa149b000000010000) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [74995] [09/Nov/2020:23:18:35.222581324 -0500] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: committing all csns for csn 5faa149b000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [75125] [09/Nov/2020:23:18:35.228142766 -0500] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: processing data csn 5faa149b000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [75263] [09/Nov/2020:23:18:35.231946574 -0500] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Successfully committed csn 5faa149b000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [75391] [09/Nov/2020:23:18:35.235490455 -0500] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Rolled up to csn 5faa149b000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [75506] [09/Nov/2020:23:18:35.240724746 -0500] - DEBUG - replication - multimaster_mmr_postop - error 
0 for operation 561. [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [75680] [09/Nov/2020:23:18:35.248689757 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: wait_for_changes -> wait_for_changes [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [75862] [09/Nov/2020:23:18:35.252622225 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: wait_for_changes -> ready_to_acquire_replica [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [76033] [09/Nov/2020:23:18:35.258792295 -0500] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-139-53:39201) - Canceling linger on the connection [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [76200] [09/Nov/2020:23:18:35.267618548 -0500] - DEBUG - NSMMReplicationPlugin - acquire_replica - agmt="cn=201" (ci-vm-10-0-139-53:39201): Replica was successfully acquired. 
[35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [76381] [09/Nov/2020:23:18:35.271739589 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: ready_to_acquire_replica -> sending_updates [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [76496] [09/Nov/2020:23:18:35.276391874 -0500] - DEBUG - csngen_adjust_time - gen state before 5faa149b0002:1604981915:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [76676] [09/Nov/2020:23:18:35.281684593 -0500] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-53:39201)): Consumer RUV: [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [76831] [09/Nov/2020:23:18:35.287517595 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replicageneration} 5faa144a000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [77043] [09/Nov/2020:23:18:35.294172131 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replica 1 ldap://localhost.localdomain:39001} 5faa144a000100010000 5faa1492000000010000 00000000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [77223] [09/Nov/2020:23:18:35.300786037 -0500] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-53:39201)): Supplier RUV: [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [77378] [09/Nov/2020:23:18:35.305240333 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replicageneration} 5faa144a000000010000 [35mDEBUG [0m 
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [77590] [09/Nov/2020:23:18:35.308528476 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replica 1 ldap://localhost.localdomain:39001} 5faa144a000100010000 5faa149b000000010000 5faa149b [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [77751] [09/Nov/2020:23:18:35.314020565 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_get_buffer - found thread private buffer cache 0x7f29bb578700 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [77977] [09/Nov/2020:23:18:35.320575357 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_get_buffer - _pool is 0x7f29e847abe0 _pool->pl_busy_lists is 0x7f29bb603ae0 _pool->pl_busy_lists->bl_buffers is 0x7f29bb578700 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [78271] [09/Nov/2020:23:18:35.327748926 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_initial_anchorcsn - agmt="cn=201" (ci-vm-10-0-139-53:39201) - (cscb 0 - state 0) - csnPrevMax () csnMax (5faa149b000000010000) csnBuf (00000000000000000000) csnConsumerMax (5faa1492000000010000) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [78384] [09/Nov/2020:23:18:35.331383734 -0500] - DEBUG - clcache_initial_anchorcsn - anchor is now: 5faa1492000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [78574] [09/Nov/2020:23:18:35.336200580 -0500] - DEBUG - NSMMReplicationPlugin - changelog program - agmt="cn=201" (ci-vm-10-0-139-53:39201): CSN 5faa1492000000010000 found, position set for replay [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [78729] 
[09/Nov/2020:23:18:35.340494283 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_get_next_change - load=1 rec=1 csn=5faa149b000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [78948] [09/Nov/2020:23:18:35.345033996 -0500] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-139-53:39201): Sending modify operation (dn="cn=test_entry,dc=example,dc=com" csn=5faa149b000000010000) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [79060] [09/Nov/2020:23:18:35.350164601 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Starting [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [79192] [09/Nov/2020:23:18:35.362792342 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [79389] [09/Nov/2020:23:18:35.370464534 -0500] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-139-53:39201): Consumer successfully sent operation with csn 5faa149b000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [79522] [09/Nov/2020:23:18:35.374608098 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 11 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [79652] [09/Nov/2020:23:18:35.378191317 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Result 3, 0, 0, 11, (null) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [79785] [09/Nov/2020:23:18:35.383167680 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result 
for message_id 11 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [80098] [09/Nov/2020:23:18:35.387825920 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_adjust_anchorcsn - agmt="cn=201" (ci-vm-10-0-139-53:39201) - (cscb 0 - state 1) - csnPrevMax (5faa149b000000010000) csnMax (5faa149b000000010000) csnBuf (5faa149b000000010000) csnConsumerMax (5faa149b000000010000) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [80221] [09/Nov/2020:23:18:35.394472821 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_load_buffer - rc=-30988 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [80404] [09/Nov/2020:23:18:35.399170287 -0500] - DEBUG - NSMMReplicationPlugin - send_updates - agmt="cn=201" (ci-vm-10-0-139-53:39201): No more updates to send (cl5GetNextOperationToReplay) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [80517] [09/Nov/2020:23:18:35.403133525 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 11 11 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [80650] [09/Nov/2020:23:18:35.407975761 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 11 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [80759] [09/Nov/2020:23:18:35.418750318 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain exiting [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [81030] [09/Nov/2020:23:18:35.429885357 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_return_buffer - session end: state=5 load=1 sent=1 skipped=0 skipped_new_rid=0 
skipped_csn_gt_cons_maxcsn=0 skipped_up_to_date=0 skipped_csn_gt_ruv=0 skipped_csn_covered=0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [81193] [09/Nov/2020:23:18:35.440270734 -0500] - DEBUG - NSMMReplicationPlugin - release_replica - agmt="cn=201" (ci-vm-10-0-139-53:39201): Successfully released consumer [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [81362] [09/Nov/2020:23:18:35.444418392 -0500] - DEBUG - NSMMReplicationPlugin - conn_start_linger -agmt="cn=201" (ci-vm-10-0-139-53:39201) - Beginning linger on the connection [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [81535] [09/Nov/2020:23:18:35.449892378 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: sending_updates -> wait_for_changes [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [81535] [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:67 _pattern_errorlog: end at offset 81535 | |||
Passed | suites/schema/schema_replication_test.py::test_schema_replication_eight | 13.05 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:schema_replication_test.py:41 ############################################### [32mINFO [0m lib389:schema_replication_test.py:42 ####### [32mINFO [0m lib389:schema_replication_test.py:43 ####### Same OC - extra MAY: Schema is pushed (fix for 47721) [32mINFO [0m lib389:schema_replication_test.py:44 ####### [32mINFO [0m lib389:schema_replication_test.py:45 ################################################### [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive b'14' (expected 15) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive b'14' (expected 15) [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive b'15' (expected 16) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:625 test_schema_replication_eight master_schema_csn=b'5faa149f000000000000' [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:626 ctest_schema_replication_eight onsumer_schema_csn=b'5faa149f000000000000' [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:55 _pattern_errorlog: start at offset 81536 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [81643] 09/Nov/2020:23:18:39.002004725 -0500] - DEBUG - replication - copy_operation_parameters - replica is null. 
[35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [81751] [09/Nov/2020:23:18:39.120031422 -0500] - DEBUG - replication - copy_operation_parameters - replica is null. [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [81873] [09/Nov/2020:23:18:39.149145551 -0500] - DEBUG - _csngen_adjust_local_time - gen state before 5faa149b0002:1604981915:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [81994] [09/Nov/2020:23:18:39.154995052 -0500] - DEBUG - _csngen_adjust_local_time - gen state after 5faa149f0000:1604981919:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [82157] [09/Nov/2020:23:18:39.172209276 -0500] - DEBUG - NSMMReplicationPlugin - ruv_add_csn_inprogress - Successfully inserted csn 5faa149f000000010000 into pending list [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [82337] [09/Nov/2020:23:18:39.177008230 -0500] - DEBUG - NSMMReplicationPlugin - purge_entry_state_information - From entry cn=test_entry,dc=example,dc=com up to CSN 5fa0da1b000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [82582] [09/Nov/2020:23:18:39.181941833 -0500] - DEBUG - NSMMReplicationPlugin - write_changelog_and_ruv - Writing change for cn=test_entry,dc=example,dc=com (uniqid: 9537b309-230b11eb-94b3c8a1-8c2d92e7, optype: 8) to changelog csn 5faa149f000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [82757] [09/Nov/2020:23:18:39.186624868 -0500] - DEBUG - NSMMReplicationPlugin - changelog program - cl5WriteOperationTxn - Successfully written entry with csn (5faa149f000000010000) [35mDEBUG [0m 
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [82895] [09/Nov/2020:23:18:39.191708226 -0500] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: committing all csns for csn 5faa149f000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [83025] [09/Nov/2020:23:18:39.211265396 -0500] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: processing data csn 5faa149f000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [83163] [09/Nov/2020:23:18:39.220039017 -0500] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Successfully committed csn 5faa149f000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [83291] [09/Nov/2020:23:18:39.225440750 -0500] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Rolled up to csn 5faa149f000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [83406] [09/Nov/2020:23:18:39.231699958 -0500] - DEBUG - replication - multimaster_mmr_postop - error 0 for operation 561. 
[35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [83580] [09/Nov/2020:23:18:39.239822293 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: wait_for_changes -> wait_for_changes [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [83762] [09/Nov/2020:23:18:39.243953414 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: wait_for_changes -> ready_to_acquire_replica [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [83933] [09/Nov/2020:23:18:39.252130800 -0500] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-139-53:39201) - Canceling linger on the connection [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [84100] [09/Nov/2020:23:18:39.262142467 -0500] - DEBUG - NSMMReplicationPlugin - acquire_replica - agmt="cn=201" (ci-vm-10-0-139-53:39201): Replica was successfully acquired. 
[35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [84281] [09/Nov/2020:23:18:39.267250374 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: ready_to_acquire_replica -> sending_updates [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [84465] [09/Nov/2020:23:18:39.273097383 -0500] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Checking consumer schema localcsn:5faa149f000000000000 / remotecsn:5faa1492000000000000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [84628] [09/Nov/2020:23:18:39.562469968 -0500] - DEBUG - schema_oc_compare_strict - Attribute postOfficeBox is not allowed in 'masterNewOCC' of the remote consumer schema [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [84793] [09/Nov/2020:23:18:39.568432559 -0500] - DEBUG - schema_oc_compare_strict - Attribute postOfficeBox is not allowed in 'consumerNewOCA' of the remote consumer schema [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [84947] [09/Nov/2020:23:18:39.576661007 -0500] - DEBUG - schema_oc_superset_check - Remote consumerNewOCA schema objectclasses is a superset of the received one. 
[35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [85110] [09/Nov/2020:23:18:39.586697366 -0500] - DEBUG - schema_oc_compare_strict - Attribute postOfficeBox is not allowed in 'masterNewOCC' of the remote consumer schema [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [85275] [09/Nov/2020:23:18:39.590880908 -0500] - DEBUG - schema_oc_compare_strict - Attribute postOfficeBox is not allowed in 'consumerNewOCA' of the remote consumer schema [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [85425] [09/Nov/2020:23:18:39.594219287 -0500] - DEBUG - schema_list_oc2learn - Add that unknown/extended objectclass consumerNewOCA (1.2.3.4.5.6.7.8.9.10.1) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [85725] [09/Nov/2020:23:18:39.598989061 -0500] - DEBUG - schema_oc_to_string - Replace (old[265]=( 1.2.3.4.5.6.7.8.9.10.1 NAME 'consumerNewOCA' DESC 'To test ticket 47490' SUP 'person' AUXILIARY MUST ( postalAddress $ preferredLocale $ telexNumber ) MAY ( postalCode $ street ) X-ORIGIN 'blahblahblah' )) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [86054] [09/Nov/2020:23:18:39.602331480 -0500] - DEBUG - supplier_get_new_definitions - supplier takes objectclass: ( 1.2.3.4.5.6.7.8.9.10.1 NAME 'consumerNewOCA' DESC 'To test ticket 47490' SUP person AUXILIARY MUST ( postalAddress $ preferredLocale $ telexNumber ) MAY ( postalCode $ street $ postOfficeBox ) X-ORIGIN 'user defined' ) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [86370] [09/Nov/2020:23:18:39.630019226 -0500] - DEBUG - modify_schema_prepare_mods - MOD[1] del (objectclasses): ( 1.2.3.4.5.6.7.8.9.10.1 NAME 'consumerNewOCA' DESC 'To test ticket 47490' SUP 'person' AUXILIARY 
MUST ( postalAddress $ preferredLocale $ telexNumber ) MAY ( postalCode $ street ) X-ORIGIN 'blahblahblah' ) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [86697] [09/Nov/2020:23:18:39.634351177 -0500] - DEBUG - modify_schema_prepare_mods - MOD[0] add (objectclasses): ( 1.2.3.4.5.6.7.8.9.10.1 NAME 'consumerNewOCA' DESC 'To test ticket 47490' SUP person AUXILIARY MUST ( postalAddress $ preferredLocale $ telexNumber ) MAY ( postalCode $ street $ postOfficeBox ) X-ORIGIN 'user defined' ) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [86805] [09/Nov/2020:23:18:39.637915755 -0500] - DEBUG - replication - copy_operation_parameters - replica is null. [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [86928] [09/Nov/2020:23:18:39.643566366 -0500] - DEBUG - modify_schema_internal_mod - Successfully learn objectclasses definitions [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [87141] [09/Nov/2020:23:18:39.679692437 -0500] - ERR - NSMMReplicationPlugin - update_consumer_schema - [S] Schema agmt="cn=201" (ci-vm-10-0-139-53:39201) must not be overwritten (set replication log for additional info) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:67 _pattern_errorlog: end at offset 87141 | |||
Passed | suites/schema/schema_replication_test.py::test_schema_replication_nine | 9.73 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:schema_replication_test.py:41 ############################################### [32mINFO [0m lib389:schema_replication_test.py:42 ####### [32mINFO [0m lib389:schema_replication_test.py:43 ####### Same OC - extra MAY: Schema is pushed - no error [32mINFO [0m lib389:schema_replication_test.py:44 ####### [32mINFO [0m lib389:schema_replication_test.py:45 ################################################### [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive b'16' (expected 17) [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive b'17' (expected 18) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:683 test_schema_replication_nine master_schema_csn=b'5faa14aa000000000000' [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:684 ctest_schema_replication_nine onsumer_schema_csn=b'5faa14aa000000000000' [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:55 _pattern_errorlog: start at offset 87142 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [87278] 09/Nov/2020:23:18:39.690158231 -0500] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] schema definitions may have been learned [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [87441] [09/Nov/2020:23:18:39.870847935 -0500] - DEBUG - schema_oc_compare_strict - Attribute postOfficeBox is not allowed in 'masterNewOCC' of the remote 
consumer schema [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [87575] [09/Nov/2020:23:18:39.998227463 -0500] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Reread remotecsn:5faa149c000000000000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [87759] [09/Nov/2020:23:18:40.003050360 -0500] - DEBUG - NSMMReplicationPlugin - conn_push_schema - Schema checking successful: ok to push the schema (agmt="cn=201" (ci-vm-10-0-139-53:39201)) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [87874] [09/Nov/2020:23:18:40.345856251 -0500] - DEBUG - csngen_adjust_time - gen state before 5faa149f0002:1604981919:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [87996] [09/Nov/2020:23:18:40.352499810 -0500] - DEBUG - _csngen_adjust_local_time - gen state before 5faa149f0002:1604981919:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [88117] [09/Nov/2020:23:18:40.356310389 -0500] - DEBUG - _csngen_adjust_local_time - gen state after 5faa14a00000:1604981920:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [88297] [09/Nov/2020:23:18:40.361335479 -0500] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-53:39201)): Consumer RUV: [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [88452] [09/Nov/2020:23:18:40.365507425 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replicageneration} 5faa144a000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [88664] [09/Nov/2020:23:18:40.372703472 -0500] - DEBUG - 
NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replica 1 ldap://localhost.localdomain:39001} 5faa144a000100010000 5faa149b000000010000 00000000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [88844] [09/Nov/2020:23:18:40.380370661 -0500] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-53:39201)): Supplier RUV: [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [88999] [09/Nov/2020:23:18:40.385507520 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replicageneration} 5faa144a000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [89211] [09/Nov/2020:23:18:40.390040136 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replica 1 ldap://localhost.localdomain:39001} 5faa144a000100010000 5faa149f000000010000 5faa149f [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [89372] [09/Nov/2020:23:18:40.398164092 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_get_buffer - found thread private buffer cache 0x7f29bb578700 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [89598] [09/Nov/2020:23:18:40.405976529 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_get_buffer - _pool is 0x7f29e847abe0 _pool->pl_busy_lists is 0x7f29bb603ae0 _pool->pl_busy_lists->bl_buffers is 0x7f29bb578700 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [89892] [09/Nov/2020:23:18:40.413222240 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_initial_anchorcsn - agmt="cn=201" (ci-vm-10-0-139-53:39201) - (cscb 0 - state 0) - csnPrevMax () csnMax 
(5faa149f000000010000) csnBuf (5faa149b000000010000) csnConsumerMax (5faa149b000000010000) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [90005] [09/Nov/2020:23:18:40.417789539 -0500] - DEBUG - clcache_initial_anchorcsn - anchor is now: 5faa149b000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [90195] [09/Nov/2020:23:18:40.423000601 -0500] - DEBUG - NSMMReplicationPlugin - changelog program - agmt="cn=201" (ci-vm-10-0-139-53:39201): CSN 5faa149b000000010000 found, position set for replay [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [90350] [09/Nov/2020:23:18:40.427665078 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_get_next_change - load=1 rec=1 csn=5faa149f000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [90462] [09/Nov/2020:23:18:40.434169543 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Starting [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [90594] [09/Nov/2020:23:18:40.441233212 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [90813] [09/Nov/2020:23:18:40.455404395 -0500] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-139-53:39201): Sending modify operation (dn="cn=test_entry,dc=example,dc=com" csn=5faa149f000000010000) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [91010] [09/Nov/2020:23:18:40.463828011 -0500] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-139-53:39201): Consumer successfully sent operation with csn 
5faa149f000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [91323] [09/Nov/2020:23:18:40.471284919 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_adjust_anchorcsn - agmt="cn=201" (ci-vm-10-0-139-53:39201) - (cscb 0 - state 1) - csnPrevMax (5faa149f000000010000) csnMax (5faa149f000000010000) csnBuf (5faa149f000000010000) csnConsumerMax (5faa149f000000010000) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [91446] [09/Nov/2020:23:18:40.481182540 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_load_buffer - rc=-30988 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [91629] [09/Nov/2020:23:18:40.487924319 -0500] - DEBUG - NSMMReplicationPlugin - send_updates - agmt="cn=201" (ci-vm-10-0-139-53:39201): No more updates to send (cl5GetNextOperationToReplay) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [91741] [09/Nov/2020:23:18:40.496345568 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 0 20 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [91873] [09/Nov/2020:23:18:40.500727768 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [92006] [09/Nov/2020:23:18:40.526601976 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 20 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [92136] [09/Nov/2020:23:18:40.558043820 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Result 3, 0, 0, 20, (null) [35mDEBUG [0m 
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [92269] [09/Nov/2020:23:18:40.566847222 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 20 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [92402] [09/Nov/2020:23:18:40.576191597 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 20 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [92535] [09/Nov/2020:23:18:40.592754107 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 20 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [92648] [09/Nov/2020:23:18:40.602953089 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 20 20 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [92757] [09/Nov/2020:23:18:40.612983664 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain exiting [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [93028] [09/Nov/2020:23:18:40.618143801 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_return_buffer - session end: state=5 load=1 sent=1 skipped=0 skipped_new_rid=0 skipped_csn_gt_cons_maxcsn=0 skipped_up_to_date=0 skipped_csn_gt_ruv=0 skipped_csn_covered=0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [93191] [09/Nov/2020:23:18:40.625809501 -0500] - DEBUG - NSMMReplicationPlugin - release_replica - agmt="cn=201" (ci-vm-10-0-139-53:39201): Successfully released consumer [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [93360] [09/Nov/2020:23:18:40.630183193 -0500] 
- DEBUG - NSMMReplicationPlugin - conn_start_linger -agmt="cn=201" (ci-vm-10-0-139-53:39201) - Beginning linger on the connection [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [93533] [09/Nov/2020:23:18:40.634684160 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: sending_updates -> wait_for_changes [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [93641] [09/Nov/2020:23:18:41.248809766 -0500] - DEBUG - replication - copy_operation_parameters - replica is null. [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [93811] [09/Nov/2020:23:18:41.264024532 -0500] - DEBUG - NSMMReplicationPlugin - agmt_set_enabled_from_entry: agreement is now disabled (agmt="cn=201" (ci-vm-10-0-139-53:39201)) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [93985] [09/Nov/2020:23:18:41.268992823 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: wait_for_changes -> wait_for_changes [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [94159] [09/Nov/2020:23:18:41.273949225 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: wait_for_changes -> wait_for_changes [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [94330] [09/Nov/2020:23:18:41.278380679 -0500] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-139-53:39201) - Canceling linger on the connection [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [94504] [09/Nov/2020:23:18:41.290211158 -0500] - DEBUG - NSMMReplicationPlugin - 
close_connection_internal - agmt="cn=201" (ci-vm-10-0-139-53:39201) - Disconnected from the consumer [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [94668] [09/Nov/2020:23:18:41.369172060 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_stop - agmt="cn=201" (ci-vm-10-0-139-53:39201): Protocol stopped after 0 seconds [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [94796] [09/Nov/2020:23:18:41.374575660 -0500] - DEBUG - NSMMReplicationPlugin - Database RUV: {replicageneration} 5faa144a000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [94981] [09/Nov/2020:23:18:41.384142008 -0500] - DEBUG - NSMMReplicationPlugin - Database RUV: {replica 1 ldap://localhost.localdomain:39001} 5faa144a000100010000 5faa149f000000010000 5faa149f [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [95155] [09/Nov/2020:23:18:41.392692308 -0500] - DEBUG - NSMMReplicationPlugin - close_connection_internal - agmt="cn=201" (ci-vm-10-0-139-53:39201) - Disconnected from the consumer [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [95263] [09/Nov/2020:23:18:41.399463753 -0500] - DEBUG - replication - copy_operation_parameters - replica is null. [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [95371] [09/Nov/2020:23:18:46.427574224 -0500] - DEBUG - replication - copy_operation_parameters - replica is null. 
[35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [95540] [09/Nov/2020:23:18:46.434942128 -0500] - DEBUG - NSMMReplicationPlugin - agmt_set_enabled_from_entry: agreement is now enabled (agmt="cn=201" (ci-vm-10-0-139-53:39201)) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [95714] [09/Nov/2020:23:18:46.442070461 -0500] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-139-53:39201) - No linger to cancel on the connection [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [95888] [09/Nov/2020:23:18:46.447091520 -0500] - DEBUG - NSMMReplicationPlugin - close_connection_internal - agmt="cn=201" (ci-vm-10-0-139-53:39201) - Disconnected from the consumer [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [96059] [09/Nov/2020:23:18:46.451695516 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: start -> ready_to_acquire_replica [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [96227] [09/Nov/2020:23:18:46.457017192 -0500] - DEBUG - NSMMReplicationPlugin - conn_connect - agmt="cn=201" (ci-vm-10-0-139-53:39201) - Trying non-secure slapi_ldap_init_ext [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [96753] [09/Nov/2020:23:18:46.462985225 -0500] - DEBUG - NSMMReplicationPlugin - conn_connect - agmt="cn=201" (ci-vm-10-0-139-53:39201) - binddn = cn=ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701,ou=services,dc=example,dc=com, passwd = 
{AES-TUhNR0NTcUdTSWIzRFFFRkRUQm1NRVVHQ1NxR1NJYjNEUUVGRERBNEJDUmtaVFV5WlRsbVl5MWtORGhrTTJJdw0KTlMxaFl6VTNNamcwT1Mxak1XVm1aVEEyTlFBQ0FRSUNBU0F3Q2dZSUtvWklodmNOQWdjd0hRWUpZSVpJQVdVRA0KQkFFcUJCQjd4NlBETjhVdlBBM09zZHphOFloVw==}3YoYoYU1jYNs6pwA+AojP+tDTZgc5hoAizypeBWkBkQpzCU9c506zfLA5y7ps+Cb [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [96798] 1k5LmqhzJjEEsOEg+fFgQ7P0u9lX4gg0wKWkeZ0qr/Y= [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [96972] [09/Nov/2020:23:18:46.471461166 -0500] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-139-53:39201) - No linger to cancel on the connection [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [97094] [09/Nov/2020:23:18:46.478555321 -0500] - DEBUG - _csngen_adjust_local_time - gen state before 5faa14a00000:1604981920:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [97215] [09/Nov/2020:23:18:46.481997717 -0500] - DEBUG - _csngen_adjust_local_time - gen state after 5faa14a60000:1604981926:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [97382] [09/Nov/2020:23:18:46.488888827 -0500] - DEBUG - NSMMReplicationPlugin - acquire_replica - agmt="cn=201" (ci-vm-10-0-139-53:39201): Replica was successfully acquired. 
[35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [97563] [09/Nov/2020:23:18:46.493373531 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: ready_to_acquire_replica -> sending_updates [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [97747] [09/Nov/2020:23:18:46.497393538 -0500] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Checking consumer schema localcsn:5faa149f000000000000 / remotecsn:5faa149c000000000000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [97855] [09/Nov/2020:23:18:46.635229865 -0500] - DEBUG - replication - copy_operation_parameters - replica is null. [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [97970] [09/Nov/2020:23:18:46.644882838 -0500] - DEBUG - replication - multimaster_mmr_postop - error 0 for operation 561. 
[35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [98104] [09/Nov/2020:23:18:46.749587998 -0500] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Reread remotecsn:5faa149f000000000000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [98219] [09/Nov/2020:23:18:46.754512838 -0500] - DEBUG - csngen_adjust_time - gen state before 5faa14a60001:1604981926:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [98399] [09/Nov/2020:23:18:46.760777386 -0500] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-53:39201)): Consumer RUV: [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [98554] [09/Nov/2020:23:18:46.765337078 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replicageneration} 5faa144a000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [98766] [09/Nov/2020:23:18:46.769809430 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replica 1 ldap://localhost.localdomain:39001} 5faa144a000100010000 5faa149f000000010000 00000000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [98946] [09/Nov/2020:23:18:46.773665799 -0500] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-53:39201)): Supplier RUV: [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [99101] [09/Nov/2020:23:18:46.778934105 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replicageneration} 5faa144a000000010000 [35mDEBUG [0m 
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [99313] [09/Nov/2020:23:18:46.783526542 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replica 1 ldap://localhost.localdomain:39001} 5faa144a000100010000 5faa149f000000010000 5faa149f [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [99607] [09/Nov/2020:23:18:46.790892008 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_initial_anchorcsn - agmt="cn=201" (ci-vm-10-0-139-53:39201) - (cscb 0 - state 1) - csnPrevMax () csnMax (5faa149f000000010000) csnBuf (00000000000000000000) csnConsumerMax (5faa149f000000010000) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [99730] [09/Nov/2020:23:18:46.794925082 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_load_buffer - rc=-30988 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [100001] [09/Nov/2020:23:18:46.805408262 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_return_buffer - session end: state=5 load=0 sent=0 skipped=0 skipped_new_rid=0 skipped_csn_gt_cons_maxcsn=0 skipped_up_to_date=0 skipped_csn_gt_ruv=0 skipped_csn_covered=0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [100149] [09/Nov/2020:23:18:46.811083860 -0500] - DEBUG - NSMMReplicationPlugin - send_updates - agmt="cn=201" (ci-vm-10-0-139-53:39201): No changes to send [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [100312] [09/Nov/2020:23:18:46.819788996 -0500] - DEBUG - NSMMReplicationPlugin - release_replica - agmt="cn=201" (ci-vm-10-0-139-53:39201): Successfully released consumer [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: 
[100481] [09/Nov/2020:23:18:46.824092951 -0500] - DEBUG - NSMMReplicationPlugin - conn_start_linger -agmt="cn=201" (ci-vm-10-0-139-53:39201) - Beginning linger on the connection [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [100654] [09/Nov/2020:23:18:46.828863702 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: sending_updates -> wait_for_changes [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [100776] [09/Nov/2020:23:18:48.453527268 -0500] - DEBUG - _csngen_adjust_local_time - gen state before 5faa14a60001:1604981926:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [100897] [09/Nov/2020:23:18:48.458500951 -0500] - DEBUG - _csngen_adjust_local_time - gen state after 5faa14a80000:1604981928:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [101060] [09/Nov/2020:23:18:48.463307276 -0500] - DEBUG - NSMMReplicationPlugin - ruv_add_csn_inprogress - Successfully inserted csn 5faa14a8000000010000 into pending list [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [101240] [09/Nov/2020:23:18:48.468465492 -0500] - DEBUG - NSMMReplicationPlugin - purge_entry_state_information - From entry cn=test_entry,dc=example,dc=com up to CSN 5fa0da1f000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [101485] [09/Nov/2020:23:18:48.474804217 -0500] - DEBUG - NSMMReplicationPlugin - write_changelog_and_ruv - Writing change for cn=test_entry,dc=example,dc=com (uniqid: 9537b309-230b11eb-94b3c8a1-8c2d92e7, optype: 8) to changelog csn 5faa14a8000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [101660] 
[09/Nov/2020:23:18:48.479527051 -0500] - DEBUG - NSMMReplicationPlugin - changelog program - cl5WriteOperationTxn - Successfully written entry with csn (5faa14a8000000010000) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [101798] [09/Nov/2020:23:18:48.485817533 -0500] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: committing all csns for csn 5faa14a8000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [101928] [09/Nov/2020:23:18:48.490656617 -0500] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: processing data csn 5faa14a8000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [102066] [09/Nov/2020:23:18:48.495377341 -0500] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Successfully committed csn 5faa14a8000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [102194] [09/Nov/2020:23:18:48.499569104 -0500] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Rolled up to csn 5faa14a8000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [102309] [09/Nov/2020:23:18:48.504526862 -0500] - DEBUG - replication - multimaster_mmr_postop - error 0 for operation 561. 
[35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [102483] [09/Nov/2020:23:18:48.513155223 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: wait_for_changes -> wait_for_changes [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [102665] [09/Nov/2020:23:18:48.518020887 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: wait_for_changes -> ready_to_acquire_replica [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [102836] [09/Nov/2020:23:18:48.522530585 -0500] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-139-53:39201) - Canceling linger on the connection [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [103003] [09/Nov/2020:23:18:48.530762381 -0500] - DEBUG - NSMMReplicationPlugin - acquire_replica - agmt="cn=201" (ci-vm-10-0-139-53:39201): Replica was successfully acquired. 
[35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [103184] [09/Nov/2020:23:18:48.536544143 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: ready_to_acquire_replica -> sending_updates [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [103299] [09/Nov/2020:23:18:48.544751103 -0500] - DEBUG - csngen_adjust_time - gen state before 5faa14a80002:1604981928:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [103479] [09/Nov/2020:23:18:48.550636233 -0500] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-53:39201)): Consumer RUV: [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [103634] [09/Nov/2020:23:18:48.556579515 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replicageneration} 5faa144a000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [103846] [09/Nov/2020:23:18:48.561205620 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replica 1 ldap://localhost.localdomain:39001} 5faa144a000100010000 5faa149f000000010000 00000000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [104026] [09/Nov/2020:23:18:48.565709758 -0500] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-53:39201)): Supplier RUV: [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [104181] [09/Nov/2020:23:18:48.569960288 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replicageneration} 5faa144a000000010000 [35mDEBUG 
[0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [104393] [09/Nov/2020:23:18:48.574165780 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replica 1 ldap://localhost.localdomain:39001} 5faa144a000100010000 5faa14a8000000010000 5faa14a8 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [104554] [09/Nov/2020:23:18:48.582256036 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_get_buffer - found thread private buffer cache 0x7f29bb578800 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [104780] [09/Nov/2020:23:18:48.587411396 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_get_buffer - _pool is 0x7f29e847abe0 _pool->pl_busy_lists is 0x7f29bb603ae0 _pool->pl_busy_lists->bl_buffers is 0x7f29bb578800 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [105074] [09/Nov/2020:23:18:48.593073346 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_initial_anchorcsn - agmt="cn=201" (ci-vm-10-0-139-53:39201) - (cscb 0 - state 0) - csnPrevMax () csnMax (5faa14a8000000010000) csnBuf (00000000000000000000) csnConsumerMax (5faa149f000000010000) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [105187] [09/Nov/2020:23:18:48.623753188 -0500] - DEBUG - clcache_initial_anchorcsn - anchor is now: 5faa149f000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [105377] [09/Nov/2020:23:18:48.638479747 -0500] - DEBUG - NSMMReplicationPlugin - changelog program - agmt="cn=201" (ci-vm-10-0-139-53:39201): CSN 5faa149f000000010000 found, position set for replay [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [105532] 
[09/Nov/2020:23:18:48.645040519 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_get_next_change - load=1 rec=1 csn=5faa14a8000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [105644] [09/Nov/2020:23:18:48.650183136 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Starting [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [105863] [09/Nov/2020:23:18:48.654281018 -0500] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-139-53:39201): Sending modify operation (dn="cn=test_entry,dc=example,dc=com" csn=5faa14a8000000010000) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [105995] [09/Nov/2020:23:18:48.660213292 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [106192] [09/Nov/2020:23:18:48.665787351 -0500] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-139-53:39201): Consumer successfully sent operation with csn 5faa14a8000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [106324] [09/Nov/2020:23:18:48.681744444 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [106637] [09/Nov/2020:23:18:48.691771958 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_adjust_anchorcsn - agmt="cn=201" (ci-vm-10-0-139-53:39201) - (cscb 0 - state 1) - csnPrevMax (5faa14a8000000010000) csnMax (5faa14a8000000010000) csnBuf (5faa14a8000000010000) csnConsumerMax (5faa14a8000000010000) [35mDEBUG [0m 
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [106760] [09/Nov/2020:23:18:48.696287246 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_load_buffer - rc=-30988 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [106943] [09/Nov/2020:23:18:48.700158986 -0500] - DEBUG - NSMMReplicationPlugin - send_updates - agmt="cn=201" (ci-vm-10-0-139-53:39201): No more updates to send (cl5GetNextOperationToReplay) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [107055] [09/Nov/2020:23:18:48.704358607 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 0 11 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [107188] [09/Nov/2020:23:18:48.708360243 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 11 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [107318] [09/Nov/2020:23:18:48.712708414 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Result 3, 0, 0, 11, (null) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [107451] [09/Nov/2020:23:18:48.716795897 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 11 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [107584] [09/Nov/2020:23:18:48.723522628 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 11 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [107717] [09/Nov/2020:23:18:48.730482236 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 11 
[35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [107850] [09/Nov/2020:23:18:48.740135412 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 11 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [107983] [09/Nov/2020:23:18:48.755902401 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 11 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [108116] [09/Nov/2020:23:18:48.781825943 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 11 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [108229] [09/Nov/2020:23:18:48.808523263 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 11 11 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [108338] [09/Nov/2020:23:18:48.822266211 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain exiting [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [108609] [09/Nov/2020:23:18:48.831682681 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_return_buffer - session end: state=5 load=1 sent=1 skipped=0 skipped_new_rid=0 skipped_csn_gt_cons_maxcsn=0 skipped_up_to_date=0 skipped_csn_gt_ruv=0 skipped_csn_covered=0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [108772] [09/Nov/2020:23:18:48.843898349 -0500] - DEBUG - NSMMReplicationPlugin - release_replica - agmt="cn=201" (ci-vm-10-0-139-53:39201): Successfully released consumer [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [108941] 
[09/Nov/2020:23:18:48.849273024 -0500] - DEBUG - NSMMReplicationPlugin - conn_start_linger -agmt="cn=201" (ci-vm-10-0-139-53:39201) - Beginning linger on the connection [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [109114] [09/Nov/2020:23:18:48.853848751 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: sending_updates -> wait_for_changes [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [109222] [09/Nov/2020:23:18:49.961560671 -0500] - DEBUG - replication - copy_operation_parameters - replica is null. [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [109330] [09/Nov/2020:23:18:50.059329706 -0500] - DEBUG - replication - copy_operation_parameters - replica is null. [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [109452] [09/Nov/2020:23:18:50.082211720 -0500] - DEBUG - _csngen_adjust_local_time - gen state before 5faa14a80002:1604981928:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [109573] [09/Nov/2020:23:18:50.090523404 -0500] - DEBUG - _csngen_adjust_local_time - gen state after 5faa14aa0000:1604981930:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [109736] [09/Nov/2020:23:18:50.094497620 -0500] - DEBUG - NSMMReplicationPlugin - ruv_add_csn_inprogress - Successfully inserted csn 5faa14aa000000010000 into pending list [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [109916] [09/Nov/2020:23:18:50.100484801 -0500] - DEBUG - NSMMReplicationPlugin - purge_entry_state_information - From entry cn=test_entry,dc=example,dc=com up to CSN 5fa0da28000000010000 [35mDEBUG [0m 
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [110161] [09/Nov/2020:23:18:50.105610747 -0500] - DEBUG - NSMMReplicationPlugin - write_changelog_and_ruv - Writing change for cn=test_entry,dc=example,dc=com (uniqid: 9537b309-230b11eb-94b3c8a1-8c2d92e7, optype: 8) to changelog csn 5faa14aa000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [110336] [09/Nov/2020:23:18:50.113828482 -0500] - DEBUG - NSMMReplicationPlugin - changelog program - cl5WriteOperationTxn - Successfully written entry with csn (5faa14aa000000010000) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [110474] [09/Nov/2020:23:18:50.118440668 -0500] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: committing all csns for csn 5faa14aa000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [110604] [09/Nov/2020:23:18:50.122348654 -0500] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: processing data csn 5faa14aa000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [110742] [09/Nov/2020:23:18:50.126815673 -0500] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Successfully committed csn 5faa14aa000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [110870] [09/Nov/2020:23:18:50.143363531 -0500] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Rolled up to csn 5faa14aa000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [110985] [09/Nov/2020:23:18:50.152600166 -0500] - DEBUG - replication - multimaster_mmr_postop - error 0 for operation 561. 
[35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [111159] [09/Nov/2020:23:18:50.159024716 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: wait_for_changes -> wait_for_changes [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [111341] [09/Nov/2020:23:18:50.163131974 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: wait_for_changes -> ready_to_acquire_replica [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [111512] [09/Nov/2020:23:18:50.167215098 -0500] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-139-53:39201) - Canceling linger on the connection [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [111679] [09/Nov/2020:23:18:50.171866834 -0500] - DEBUG - NSMMReplicationPlugin - acquire_replica - agmt="cn=201" (ci-vm-10-0-139-53:39201): Replica was successfully acquired. 
[35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [111860] [09/Nov/2020:23:18:50.174984525 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: ready_to_acquire_replica -> sending_updates [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [112044] [09/Nov/2020:23:18:50.177864407 -0500] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Checking consumer schema localcsn:5faa14aa000000000000 / remotecsn:5faa149f000000000000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [112178] [09/Nov/2020:23:18:50.455499308 -0500] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Reread remotecsn:5faa149f000000000000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [112362] [09/Nov/2020:23:18:50.459704792 -0500] - DEBUG - NSMMReplicationPlugin - conn_push_schema - Schema checking successful: ok to push the schema (agmt="cn=201" (ci-vm-10-0-139-53:39201)) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [112477] [09/Nov/2020:23:18:50.803356815 -0500] - DEBUG - csngen_adjust_time - gen state before 5faa14aa0002:1604981930:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [112657] [09/Nov/2020:23:18:50.808284248 -0500] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-53:39201)): Consumer RUV: [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [112812] [09/Nov/2020:23:18:50.811686561 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replicageneration} 5faa144a000000010000 [35mDEBUG [0m 
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [113024] [09/Nov/2020:23:18:50.814647373 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replica 1 ldap://localhost.localdomain:39001} 5faa144a000100010000 5faa14a8000000010000 00000000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [113204] [09/Nov/2020:23:18:50.817700436 -0500] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-53:39201)): Supplier RUV: [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [113359] [09/Nov/2020:23:18:50.820932522 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replicageneration} 5faa144a000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [113571] [09/Nov/2020:23:18:50.824156376 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replica 1 ldap://localhost.localdomain:39001} 5faa144a000100010000 5faa14aa000000010000 5faa14aa [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [113732] [09/Nov/2020:23:18:50.827878475 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_get_buffer - found thread private buffer cache 0x7f29bb578800 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [113958] [09/Nov/2020:23:18:50.831439275 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_get_buffer - _pool is 0x7f29e847abe0 _pool->pl_busy_lists is 0x7f29bb603ae0 _pool->pl_busy_lists->bl_buffers is 0x7f29bb578800 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [114252] [09/Nov/2020:23:18:50.835898453 -0500] - DEBUG - 
agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_initial_anchorcsn - agmt="cn=201" (ci-vm-10-0-139-53:39201) - (cscb 0 - state 0) - csnPrevMax () csnMax (5faa14aa000000010000) csnBuf (5faa14a8000000010000) csnConsumerMax (5faa14a8000000010000) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [114365] [09/Nov/2020:23:18:50.839286681 -0500] - DEBUG - clcache_initial_anchorcsn - anchor is now: 5faa14a8000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [114555] [09/Nov/2020:23:18:50.843483049 -0500] - DEBUG - NSMMReplicationPlugin - changelog program - agmt="cn=201" (ci-vm-10-0-139-53:39201): CSN 5faa14a8000000010000 found, position set for replay [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [114710] [09/Nov/2020:23:18:50.848412320 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_get_next_change - load=1 rec=1 csn=5faa14aa000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [114822] [09/Nov/2020:23:18:50.852186009 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Starting [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [115041] [09/Nov/2020:23:18:50.856003664 -0500] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-139-53:39201): Sending modify operation (dn="cn=test_entry,dc=example,dc=com" csn=5faa14aa000000010000) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [115173] [09/Nov/2020:23:18:50.859149774 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [115370] 
[09/Nov/2020:23:18:50.863756290 -0500] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-139-53:39201): Consumer successfully sent operation with csn 5faa14aa000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [115683] [09/Nov/2020:23:18:50.866833970 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_adjust_anchorcsn - agmt="cn=201" (ci-vm-10-0-139-53:39201) - (cscb 0 - state 1) - csnPrevMax (5faa14aa000000010000) csnMax (5faa14aa000000010000) csnBuf (5faa14aa000000010000) csnConsumerMax (5faa14aa000000010000) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [115806] [09/Nov/2020:23:18:50.870145040 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_load_buffer - rc=-30988 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [115989] [09/Nov/2020:23:18:50.874147970 -0500] - DEBUG - NSMMReplicationPlugin - send_updates - agmt="cn=201" (ci-vm-10-0-139-53:39201): No more updates to send (cl5GetNextOperationToReplay) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [116101] [09/Nov/2020:23:18:50.879156699 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 0 18 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [116234] [09/Nov/2020:23:18:50.882430077 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 18 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [116364] [09/Nov/2020:23:18:50.885611447 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Result 3, 0, 0, 18, (null) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: 
[116497] [09/Nov/2020:23:18:50.888943371 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 18 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [116630] [09/Nov/2020:23:18:50.893254867 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 18 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [116763] [09/Nov/2020:23:18:50.906478994 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 18 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [116896] [09/Nov/2020:23:18:50.914898572 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 18 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [117029] [09/Nov/2020:23:18:50.929422132 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 18 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [117162] [09/Nov/2020:23:18:50.949261060 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 18 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [117275] [09/Nov/2020:23:18:50.982577096 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 18 18 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [117384] [09/Nov/2020:23:18:50.989745761 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain exiting [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [117655] [09/Nov/2020:23:18:50.993682898 -0500] - 
DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_return_buffer - session end: state=5 load=1 sent=1 skipped=0 skipped_new_rid=0 skipped_csn_gt_cons_maxcsn=0 skipped_up_to_date=0 skipped_csn_gt_ruv=0 skipped_csn_covered=0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [117818] [09/Nov/2020:23:18:51.006943842 -0500] - DEBUG - NSMMReplicationPlugin - release_replica - agmt="cn=201" (ci-vm-10-0-139-53:39201): Successfully released consumer [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [117987] [09/Nov/2020:23:18:51.010787976 -0500] - DEBUG - NSMMReplicationPlugin - conn_start_linger -agmt="cn=201" (ci-vm-10-0-139-53:39201) - Beginning linger on the connection [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [118160] [09/Nov/2020:23:18:51.013928372 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: sending_updates -> wait_for_changes [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [118268] [09/Nov/2020:23:18:51.165764539 -0500] - DEBUG - replication - copy_operation_parameters - replica is null. 
[35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [118438] [09/Nov/2020:23:18:51.171200562 -0500] - DEBUG - NSMMReplicationPlugin - agmt_set_enabled_from_entry: agreement is now disabled (agmt="cn=201" (ci-vm-10-0-139-53:39201)) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [118612] [09/Nov/2020:23:18:51.174800880 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: wait_for_changes -> wait_for_changes [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [118786] [09/Nov/2020:23:18:51.186605371 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: wait_for_changes -> wait_for_changes [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [118957] [09/Nov/2020:23:18:51.190910888 -0500] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-139-53:39201) - Canceling linger on the connection [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [119131] [09/Nov/2020:23:18:51.200478819 -0500] - DEBUG - NSMMReplicationPlugin - close_connection_internal - agmt="cn=201" (ci-vm-10-0-139-53:39201) - Disconnected from the consumer [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [119295] [09/Nov/2020:23:18:51.274900715 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_stop - agmt="cn=201" (ci-vm-10-0-139-53:39201): Protocol stopped after 0 seconds [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [119423] [09/Nov/2020:23:18:51.278941255 -0500] - DEBUG - NSMMReplicationPlugin - Database RUV: {replicageneration} 5faa144a000000010000 [35mDEBUG [0m 
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [119608] [09/Nov/2020:23:18:51.282861691 -0500] - DEBUG - NSMMReplicationPlugin - Database RUV: {replica 1 ldap://localhost.localdomain:39001} 5faa144a000100010000 5faa14aa000000010000 5faa14aa [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [119782] [09/Nov/2020:23:18:51.289543881 -0500] - DEBUG - NSMMReplicationPlugin - close_connection_internal - agmt="cn=201" (ci-vm-10-0-139-53:39201) - Disconnected from the consumer [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [119890] [09/Nov/2020:23:18:51.294448817 -0500] - DEBUG - replication - copy_operation_parameters - replica is null. [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [119998] [09/Nov/2020:23:18:56.320321669 -0500] - DEBUG - replication - copy_operation_parameters - replica is null. 
[35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [120167] [09/Nov/2020:23:18:56.337041857 -0500] - DEBUG - NSMMReplicationPlugin - agmt_set_enabled_from_entry: agreement is now enabled (agmt="cn=201" (ci-vm-10-0-139-53:39201)) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [120341] [09/Nov/2020:23:18:56.348331217 -0500] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-139-53:39201) - No linger to cancel on the connection [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [120515] [09/Nov/2020:23:18:56.352708528 -0500] - DEBUG - NSMMReplicationPlugin - close_connection_internal - agmt="cn=201" (ci-vm-10-0-139-53:39201) - Disconnected from the consumer [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [120686] [09/Nov/2020:23:18:56.359016790 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: start -> ready_to_acquire_replica [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [120854] [09/Nov/2020:23:18:56.363297103 -0500] - DEBUG - NSMMReplicationPlugin - conn_connect - agmt="cn=201" (ci-vm-10-0-139-53:39201) - Trying non-secure slapi_ldap_init_ext [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [121380] [09/Nov/2020:23:18:56.367470089 -0500] - DEBUG - NSMMReplicationPlugin - conn_connect - agmt="cn=201" (ci-vm-10-0-139-53:39201) - binddn = cn=ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701,ou=services,dc=example,dc=com, passwd = 
{AES-TUhNR0NTcUdTSWIzRFFFRkRUQm1NRVVHQ1NxR1NJYjNEUUVGRERBNEJDUmtaVFV5WlRsbVl5MWtORGhrTTJJdw0KTlMxaFl6VTNNamcwT1Mxak1XVm1aVEEyTlFBQ0FRSUNBU0F3Q2dZSUtvWklodmNOQWdjd0hRWUpZSVpJQVdVRA0KQkFFcUJCQjd4NlBETjhVdlBBM09zZHphOFloVw==}3YoYoYU1jYNs6pwA+AojP+tDTZgc5hoAizypeBWkBkQpzCU9c506zfLA5y7ps+Cb [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [121425] 1k5LmqhzJjEEsOEg+fFgQ7P0u9lX4gg0wKWkeZ0qr/Y= [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [121599] [09/Nov/2020:23:18:56.373187371 -0500] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-139-53:39201) - No linger to cancel on the connection [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [121721] [09/Nov/2020:23:18:56.381416656 -0500] - DEBUG - _csngen_adjust_local_time - gen state before 5faa14aa0002:1604981930:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [121842] [09/Nov/2020:23:18:56.385864816 -0500] - DEBUG - _csngen_adjust_local_time - gen state after 5faa14b00000:1604981936:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [122009] [09/Nov/2020:23:18:56.390700732 -0500] - DEBUG - NSMMReplicationPlugin - acquire_replica - agmt="cn=201" (ci-vm-10-0-139-53:39201): Replica was successfully acquired. 
[35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [122190] [09/Nov/2020:23:18:56.394152203 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: ready_to_acquire_replica -> sending_updates [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [122374] [09/Nov/2020:23:18:56.397784259 -0500] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Checking consumer schema localcsn:5faa14aa000000000000 / remotecsn:5faa149f000000000000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [122508] [09/Nov/2020:23:18:56.663684960 -0500] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Reread remotecsn:5faa14aa000000000000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [122623] [09/Nov/2020:23:18:56.669707710 -0500] - DEBUG - csngen_adjust_time - gen state before 5faa14b00001:1604981936:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [122803] [09/Nov/2020:23:18:56.674610247 -0500] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-53:39201)): Consumer RUV: [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [122958] [09/Nov/2020:23:18:56.678316557 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replicageneration} 5faa144a000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [123170] [09/Nov/2020:23:18:56.681861783 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replica 1 ldap://localhost.localdomain:39001} 5faa144a000100010000 5faa14aa000000010000 00000000 [35mDEBUG [0m 
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [123350] [09/Nov/2020:23:18:56.685868343 -0500] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-53:39201)): Supplier RUV: [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [123505] [09/Nov/2020:23:18:56.690143430 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replicageneration} 5faa144a000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [123717] [09/Nov/2020:23:18:56.694987373 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replica 1 ldap://localhost.localdomain:39001} 5faa144a000100010000 5faa14aa000000010000 5faa14aa [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [124011] [09/Nov/2020:23:18:56.698403118 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_initial_anchorcsn - agmt="cn=201" (ci-vm-10-0-139-53:39201) - (cscb 0 - state 1) - csnPrevMax () csnMax (5faa14aa000000010000) csnBuf (00000000000000000000) csnConsumerMax (5faa14aa000000010000) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [124134] [09/Nov/2020:23:18:56.701624593 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_load_buffer - rc=-30988 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [124405] [09/Nov/2020:23:18:56.705135068 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_return_buffer - session end: state=5 load=0 sent=0 skipped=0 skipped_new_rid=0 skipped_csn_gt_cons_maxcsn=0 skipped_up_to_date=0 skipped_csn_gt_ruv=0 skipped_csn_covered=0 [35mDEBUG [0m 
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [124553] [09/Nov/2020:23:18:56.710016160 -0500] - DEBUG - NSMMReplicationPlugin - send_updates - agmt="cn=201" (ci-vm-10-0-139-53:39201): No changes to send [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [124716] [09/Nov/2020:23:18:56.797532539 -0500] - DEBUG - NSMMReplicationPlugin - release_replica - agmt="cn=201" (ci-vm-10-0-139-53:39201): Successfully released consumer [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [124885] [09/Nov/2020:23:18:56.801670903 -0500] - DEBUG - NSMMReplicationPlugin - conn_start_linger -agmt="cn=201" (ci-vm-10-0-139-53:39201) - Beginning linger on the connection [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [125058] [09/Nov/2020:23:18:56.808423717 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: sending_updates -> wait_for_changes [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [125180] [09/Nov/2020:23:18:58.363227581 -0500] - DEBUG - _csngen_adjust_local_time - gen state before 5faa14b00001:1604981936:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [125301] [09/Nov/2020:23:18:58.370494738 -0500] - DEBUG - _csngen_adjust_local_time - gen state after 5faa14b20000:1604981938:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [125464] [09/Nov/2020:23:18:58.376435853 -0500] - DEBUG - NSMMReplicationPlugin - ruv_add_csn_inprogress - Successfully inserted csn 5faa14b2000000010000 into pending list [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [125644] 
[09/Nov/2020:23:18:58.382628291 -0500] - DEBUG - NSMMReplicationPlugin - purge_entry_state_information - From entry cn=test_entry,dc=example,dc=com up to CSN 5fa0da2a000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [125889] [09/Nov/2020:23:18:58.387029683 -0500] - DEBUG - NSMMReplicationPlugin - write_changelog_and_ruv - Writing change for cn=test_entry,dc=example,dc=com (uniqid: 9537b309-230b11eb-94b3c8a1-8c2d92e7, optype: 8) to changelog csn 5faa14b2000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [126064] [09/Nov/2020:23:18:58.390347490 -0500] - DEBUG - NSMMReplicationPlugin - changelog program - cl5WriteOperationTxn - Successfully written entry with csn (5faa14b2000000010000) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [126202] [09/Nov/2020:23:18:58.394361995 -0500] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: committing all csns for csn 5faa14b2000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [126332] [09/Nov/2020:23:18:58.397450793 -0500] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: processing data csn 5faa14b2000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [126470] [09/Nov/2020:23:18:58.403686749 -0500] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Successfully committed csn 5faa14b2000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [126598] [09/Nov/2020:23:18:58.408081447 -0500] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Rolled up to csn 5faa14b2000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [126713] [09/Nov/2020:23:18:58.411127838 -0500] - DEBUG - 
replication - multimaster_mmr_postop - error 0 for operation 561. [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [126887] [09/Nov/2020:23:18:58.418302919 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: wait_for_changes -> wait_for_changes [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [127069] [09/Nov/2020:23:18:58.424549478 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: wait_for_changes -> ready_to_acquire_replica [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [127240] [09/Nov/2020:23:18:58.429030284 -0500] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-139-53:39201) - Canceling linger on the connection [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [127407] [09/Nov/2020:23:18:58.435876028 -0500] - DEBUG - NSMMReplicationPlugin - acquire_replica - agmt="cn=201" (ci-vm-10-0-139-53:39201): Replica was successfully acquired. 
[35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [127588] [09/Nov/2020:23:18:58.439457146 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: ready_to_acquire_replica -> sending_updates [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [127703] [09/Nov/2020:23:18:58.446917331 -0500] - DEBUG - csngen_adjust_time - gen state before 5faa14b20002:1604981938:0:0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [127883] [09/Nov/2020:23:18:58.453077891 -0500] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-53:39201)): Consumer RUV: [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [128038] [09/Nov/2020:23:18:58.456619682 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replicageneration} 5faa144a000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [128250] [09/Nov/2020:23:18:58.464424281 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replica 1 ldap://localhost.localdomain:39001} 5faa144a000100010000 5faa14aa000000010000 00000000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [128430] [09/Nov/2020:23:18:58.471244009 -0500] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-53:39201)): Supplier RUV: [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [128585] [09/Nov/2020:23:18:58.478072856 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replicageneration} 5faa144a000000010000 [35mDEBUG 
[0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [128797] [09/Nov/2020:23:18:58.482290121 -0500] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-53:39201): {replica 1 ldap://localhost.localdomain:39001} 5faa144a000100010000 5faa14b2000000010000 5faa14b2 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [128958] [09/Nov/2020:23:18:58.486079705 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_get_buffer - found thread private buffer cache 0x7f29bb578900 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [129184] [09/Nov/2020:23:18:58.491462825 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_get_buffer - _pool is 0x7f29e847abe0 _pool->pl_busy_lists is 0x7f29bb603ae0 _pool->pl_busy_lists->bl_buffers is 0x7f29bb578900 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [129478] [09/Nov/2020:23:18:58.495276092 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_initial_anchorcsn - agmt="cn=201" (ci-vm-10-0-139-53:39201) - (cscb 0 - state 0) - csnPrevMax () csnMax (5faa14b2000000010000) csnBuf (00000000000000000000) csnConsumerMax (5faa14aa000000010000) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [129591] [09/Nov/2020:23:18:58.499160568 -0500] - DEBUG - clcache_initial_anchorcsn - anchor is now: 5faa14aa000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [129781] [09/Nov/2020:23:18:58.503002859 -0500] - DEBUG - NSMMReplicationPlugin - changelog program - agmt="cn=201" (ci-vm-10-0-139-53:39201): CSN 5faa14aa000000010000 found, position set for replay [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [129936] 
[09/Nov/2020:23:18:58.507096469 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_get_next_change - load=1 rec=1 csn=5faa14b2000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [130155] [09/Nov/2020:23:18:58.510545388 -0500] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-139-53:39201): Sending modify operation (dn="cn=test_entry,dc=example,dc=com" csn=5faa14b2000000010000) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [130267] [09/Nov/2020:23:18:58.517109052 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Starting [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [130399] [09/Nov/2020:23:18:58.521216573 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [130596] [09/Nov/2020:23:18:58.526824327 -0500] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-139-53:39201): Consumer successfully sent operation with csn 5faa14b2000000010000 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [130909] [09/Nov/2020:23:18:58.531321460 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_adjust_anchorcsn - agmt="cn=201" (ci-vm-10-0-139-53:39201) - (cscb 0 - state 1) - csnPrevMax (5faa14b2000000010000) csnMax (5faa14b2000000010000) csnBuf (5faa14b2000000010000) csnConsumerMax (5faa14b2000000010000) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [131032] [09/Nov/2020:23:18:58.541241992 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_load_buffer - rc=-30988 [35mDEBUG [0m 
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [131215] [09/Nov/2020:23:18:58.547308649 -0500] - DEBUG - NSMMReplicationPlugin - send_updates - agmt="cn=201" (ci-vm-10-0-139-53:39201): No more updates to send (cl5GetNextOperationToReplay) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [131327] [09/Nov/2020:23:18:58.551657692 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 0 11 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [131460] [09/Nov/2020:23:18:58.558547645 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 11 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [131590] [09/Nov/2020:23:18:58.566979080 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Result 3, 0, 0, 11, (null) [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [131723] [09/Nov/2020:23:18:58.571320701 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 11 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [131856] [09/Nov/2020:23:18:58.578209449 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 11 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [131989] [09/Nov/2020:23:18:58.583916041 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 11 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [132122] [09/Nov/2020:23:18:58.596327879 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for 
message_id 11 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [132255] [09/Nov/2020:23:18:58.608377629 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 11 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [132388] [09/Nov/2020:23:18:58.634308955 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 11 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [132501] [09/Nov/2020:23:18:58.658803872 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 11 11 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [132610] [09/Nov/2020:23:18:58.671884396 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain exiting [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [132881] [09/Nov/2020:23:18:58.678603860 -0500] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-53:39201) - clcache_return_buffer - session end: state=5 load=1 sent=1 skipped=0 skipped_new_rid=0 skipped_csn_gt_cons_maxcsn=0 skipped_up_to_date=0 skipped_csn_gt_ruv=0 skipped_csn_covered=0 [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [133044] [09/Nov/2020:23:18:58.689900150 -0500] - DEBUG - NSMMReplicationPlugin - release_replica - agmt="cn=201" (ci-vm-10-0-139-53:39201): Successfully released consumer [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [133213] [09/Nov/2020:23:18:58.696080350 -0500] - DEBUG - NSMMReplicationPlugin - conn_start_linger -agmt="cn=201" (ci-vm-10-0-139-53:39201) - Beginning linger on the connection [35mDEBUG [0m 
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [133386] [09/Nov/2020:23:18:58.702421216 -0500] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-53:39201): State: sending_updates -> wait_for_changes [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [133386] [35mDEBUG [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:67 _pattern_errorlog: end at offset 133386 [32mINFO [0m tests.suites.schema.schema_replication_test:schema_replication_test.py:693 Testcase PASSED | |||
Passed | suites/schema/schema_test.py::test_schema_comparewithfiles | 0.16 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.schema.schema_test:schema_test.py:125 Running test_schema_comparewithfiles... [33mWARNING [0m tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /etc/dirsrv/slapd-standalone1/schema/99user.ldif as a schema file - skipping [33mWARNING [0m tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/10mep-plugin.ldif as a schema file - skipping [33mWARNING [0m tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/60autofs.ldif as a schema file - skipping [33mWARNING [0m tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/10automember-plugin.ldif as a schema file - skipping [33mWARNING [0m tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/25java-object.ldif as a schema file - skipping [33mWARNING [0m tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/00core.ldif as a schema file - skipping [33mWARNING [0m tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/30ns-common.ldif as a schema file - skipping [33mWARNING [0m tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/60pam-plugin.ldif as a schema file - skipping [33mWARNING [0m tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/60posix-winsync-plugin.ldif as a schema file - 
skipping [33mWARNING [0m tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/28pilot.ldif as a schema file - skipping [33mWARNING [0m tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/02common.ldif as a schema file - skipping [33mWARNING [0m tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/50ns-directory.ldif as a schema file - skipping [33mWARNING [0m tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/10dna-plugin.ldif as a schema file - skipping [33mWARNING [0m tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/50ns-admin.ldif as a schema file - skipping [33mWARNING [0m tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/20subscriber.ldif as a schema file - skipping [33mWARNING [0m tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/10rfc2307compat.ldif as a schema file - skipping [33mWARNING [0m tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/60samba3.ldif as a schema file - skipping [33mWARNING [0m tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/60trust.ldif as a schema file - skipping [33mWARNING [0m tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/60mozilla.ldif as a schema file - skipping [33mWARNING [0m tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/60pureftpd.ldif as a schema file - skipping [33mWARNING [0m tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/05rfc4524.ldif as a schema file - skipping [33mWARNING [0m tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/60nss-ldap.ldif as a schema file - skipping [33mWARNING 
[0m tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/50ns-certificate.ldif as a schema file - skipping [33mWARNING [0m tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/60eduperson.ldif as a schema file - skipping [33mWARNING [0m tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/05rfc2927.ldif as a schema file - skipping [33mWARNING [0m tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/60acctpolicy.ldif as a schema file - skipping [33mWARNING [0m tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/50ns-web.ldif as a schema file - skipping [33mWARNING [0m tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/60sudo.ldif as a schema file - skipping [33mWARNING [0m tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/60rfc3712.ldif as a schema file - skipping [33mWARNING [0m tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/50ns-mail.ldif as a schema file - skipping [33mWARNING [0m tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/01core389.ldif as a schema file - skipping [33mWARNING [0m tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/05rfc4523.ldif as a schema file - skipping [33mWARNING [0m tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/50ns-value.ldif as a schema file - skipping [33mWARNING [0m tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/60sabayon.ldif as a schema file - skipping [33mWARNING [0m tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/06inetorgperson.ldif as a schema file - skipping [33mWARNING [0m 
tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/60rfc2739.ldif as a schema file - skipping [32mINFO [0m tests.suites.schema.schema_test:schema_test.py:166 test_schema_comparewithfiles: PASSED | |||
Passed | suites/setup_ds/dscreate_test.py::test_setup_ds_minimal_dry | 0.22 | |
-------------------------------Captured log setup------------------------------- [35mDEBUG [0m lib389:dscreate_test.py:42 Instance allocated [35mDEBUG [0m lib389:__init__.py:554 Allocate <class 'lib389.DirSrv'> with None [35mDEBUG [0m lib389:__init__.py:577 Allocate <class 'lib389.DirSrv'> with ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:54321 [35mDEBUG [0m lib389:__init__.py:602 Allocate <class 'lib389.DirSrv'> with ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:54321 [35mDEBUG [0m lib389:__init__.py:742 list instance not found in /etc/dirsrv/slapd-standalone/dse.ldif: standalone -------------------------------Captured log call-------------------------------- [35mDEBUG [0m lib389:__init__.py:742 list instance not found in /etc/dirsrv/slapd-standalone/dse.ldif: standalone [32mINFO [0m LogCapture.SetupDs:setup.py:670 NOOP: Dry run requested [35mDEBUG [0m lib389:__init__.py:742 list instance not found in /etc/dirsrv/slapd-standalone/dse.ldif: standalone | |||
Passed | suites/setup_ds/dscreate_test.py::test_setup_ds_minimal | 27.14 | |
-------------------------------Captured log setup------------------------------- [35mDEBUG [0m lib389:dscreate_test.py:42 Instance allocated [35mDEBUG [0m lib389:__init__.py:554 Allocate <class 'lib389.DirSrv'> with None [35mDEBUG [0m lib389:__init__.py:577 Allocate <class 'lib389.DirSrv'> with ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:54321 [35mDEBUG [0m lib389:__init__.py:602 Allocate <class 'lib389.DirSrv'> with ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:54321 [35mDEBUG [0m lib389:__init__.py:742 list instance not found in /etc/dirsrv/slapd-standalone/dse.ldif: standalone -------------------------------Captured log call-------------------------------- [35mDEBUG [0m lib389:__init__.py:742 list instance not found in /etc/dirsrv/slapd-standalone/dse.ldif: standalone [35mDEBUG [0m lib389:__init__.py:554 Allocate <class 'lib389.DirSrv'> with None [35mDEBUG [0m lib389:__init__.py:566 Allocate <class 'lib389.DirSrv'> with /var/run/slapd-standalone.socket [35mDEBUG [0m lib389:__init__.py:577 Allocate <class 'lib389.DirSrv'> with localhost:54321 [35mDEBUG [0m lib389:__init__.py:602 Allocate <class 'lib389.DirSrv'> with localhost:54321 [35mDEBUG [0m lib389:nss_ssl.py:197 nss cmd: /usr/bin/certutil -N -d /etc/dirsrv/slapd-standalone -f /etc/dirsrv/slapd-standalone/pwdfile.txt [35mDEBUG [0m lib389:nss_ssl.py:199 nss output: [35mDEBUG [0m lib389.nss_ssl:nss_ssl.py:314 nss cmd: /usr/bin/certutil -L -n Self-Signed-CA -d /etc/dirsrv/ssca/ [35mDEBUG [0m lib389:nss_ssl.py:559 CSR subject -> CN=ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com,givenName=00339197-35b6-46e8-8925-6f4c196843b8,O=testing,L=389ds,ST=Queensland,C=AU [35mDEBUG [0m lib389:nss_ssl.py:560 CSR alt_names -> ['ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com'] [35mDEBUG [0m lib389:nss_ssl.py:592 nss cmd: /usr/bin/certutil -R --keyUsage digitalSignature,nonRepudiation,keyEncipherment,dataEncipherment --nsCertType sslClient,sslServer --extKeyUsage clientAuth,serverAuth -s 
CN=ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com,givenName=00339197-35b6-46e8-8925-6f4c196843b8,O=testing,L=389ds,ST=Queensland,C=AU -8 ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com -g 4096 -d /etc/dirsrv/slapd-standalone -z /etc/dirsrv/slapd-standalone/noise.txt -f /etc/dirsrv/slapd-standalone/pwdfile.txt -a -o /etc/dirsrv/slapd-standalone/Server-Cert.csr [35mDEBUG [0m lib389.nss_ssl:nss_ssl.py:618 nss cmd: /usr/bin/certutil -C -d /etc/dirsrv/ssca/ -f /etc/dirsrv/ssca//pwdfile.txt -v 24 -a -i /etc/dirsrv/slapd-standalone/Server-Cert.csr -o /etc/dirsrv/slapd-standalone/Server-Cert.crt -c Self-Signed-CA [35mDEBUG [0m lib389:nss_ssl.py:242 nss cmd: /usr/bin/openssl rehash /etc/dirsrv/slapd-standalone [35mDEBUG [0m lib389:nss_ssl.py:646 nss cmd: /usr/bin/certutil -A -n Self-Signed-CA -t CT,, -a -i /etc/dirsrv/slapd-standalone/ca.crt -d /etc/dirsrv/slapd-standalone -f /etc/dirsrv/slapd-standalone/pwdfile.txt [35mDEBUG [0m lib389:nss_ssl.py:661 nss cmd: /usr/bin/certutil -A -n Server-Cert -t ,, -a -i /etc/dirsrv/slapd-standalone/Server-Cert.crt -d /etc/dirsrv/slapd-standalone -f /etc/dirsrv/slapd-standalone/pwdfile.txt [35mDEBUG [0m lib389:nss_ssl.py:670 nss cmd: /usr/bin/certutil -V -d /etc/dirsrv/slapd-standalone -n Server-Cert -u YCV [35mDEBUG [0m lib389.utils:utils.py:284 port 636 already in [389, 636, 3268, 3269, 7389], skipping port relabel [35mDEBUG [0m lib389.utils:utils.py:315 CMD: semanage port -a -t ldap_port_t -p tcp 54321 ; STDOUT: ; STDERR: [35mDEBUG [0m lib389:__init__.py:1173 systemd status -> True [35mDEBUG [0m lib389:__init__.py:1077 systemd status -> True [35mDEBUG [0m lib389:__init__.py:937 open(): Connecting to uri ldapi://%2Fvar%2Frun%2Fslapd-standalone.socket [35mDEBUG [0m lib389:__init__.py:945 Using dirsrv ca certificate /etc/dirsrv/slapd-standalone [35mDEBUG [0m lib389:__init__.py:954 Using external ca certificate /etc/dirsrv/slapd-standalone [35mDEBUG [0m lib389:__init__.py:967 Using external ca certificate /etc/dirsrv/slapd-standalone 
[35mDEBUG [0m lib389:__init__.py:975 Using certificate policy 1 [35mDEBUG [0m lib389:__init__.py:976 ldap.OPT_X_TLS_REQUIRE_CERT = 1 [35mDEBUG [0m lib389:__init__.py:1009 open(): Using root autobind ... [35mDEBUG [0m lib389:__init__.py:1030 open(): bound as cn=Directory Manager [35mDEBUG [0m lib389:__init__.py:1692 Retrieving entry with [('',)] [35mDEBUG [0m lib389:__init__.py:1702 Retrieved entry [dn: vendorVersion: 389-Directory/2.0.1 B2020.315.0017 ] [35mDEBUG [0m lib389:__init__.py:937 open(): Connecting to uri ldapi://%2Fvar%2Frun%2Fslapd-standalone.socket [35mDEBUG [0m lib389:__init__.py:945 Using dirsrv ca certificate /etc/dirsrv/slapd-standalone [35mDEBUG [0m lib389:__init__.py:954 Using external ca certificate /etc/dirsrv/slapd-standalone [35mDEBUG [0m lib389:__init__.py:967 Using external ca certificate /etc/dirsrv/slapd-standalone [35mDEBUG [0m lib389:__init__.py:975 Using certificate policy 1 [35mDEBUG [0m lib389:__init__.py:976 ldap.OPT_X_TLS_REQUIRE_CERT = 1 [35mDEBUG [0m lib389:__init__.py:1009 open(): Using root autobind ... 
[35mDEBUG [0m lib389:__init__.py:1030 open(): bound as cn=Directory Manager [35mDEBUG [0m lib389:__init__.py:1692 Retrieving entry with [('',)] [35mDEBUG [0m lib389:__init__.py:1702 Retrieved entry [dn: vendorVersion: 389-Directory/2.0.1 B2020.315.0017 ] [35mDEBUG [0m Config:_mapped_object.py:434 cn=config set REPLACE: ('nsslapd-secureport', '636') [35mDEBUG [0m Config:_mapped_object.py:434 cn=config set REPLACE: ('nsslapd-security', 'on') [35mDEBUG [0m Config:_mapped_object.py:434 cn=config set REPLACE: ('nsslapd-rootpw', '********') [35mDEBUG [0m lib389:__init__.py:1173 systemd status -> True [35mDEBUG [0m lib389:__init__.py:1146 systemd status -> True [35mDEBUG [0m lib389:__init__.py:1173 systemd status -> True [35mDEBUG [0m lib389:__init__.py:1077 systemd status -> True [35mDEBUG [0m lib389:__init__.py:937 open(): Connecting to uri ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:54321/ [35mDEBUG [0m lib389:__init__.py:945 Using dirsrv ca certificate /etc/dirsrv/slapd-standalone [35mDEBUG [0m lib389:__init__.py:954 Using external ca certificate /etc/dirsrv/slapd-standalone [35mDEBUG [0m lib389:__init__.py:967 Using external ca certificate /etc/dirsrv/slapd-standalone [35mDEBUG [0m lib389:__init__.py:975 Using certificate policy 1 [35mDEBUG [0m lib389:__init__.py:976 ldap.OPT_X_TLS_REQUIRE_CERT = 1 [35mDEBUG [0m lib389:__init__.py:1030 open(): bound as cn=Directory Manager [35mDEBUG [0m lib389:__init__.py:1692 Retrieving entry with [('',)] [35mDEBUG [0m lib389:__init__.py:1702 Retrieved entry [dn: vendorVersion: 389-Directory/2.0.1 B2020.315.0017 ] [35mDEBUG [0m lib389:__init__.py:1173 systemd status -> True [35mDEBUG [0m lib389:__init__.py:1146 systemd status -> True [35mDEBUG [0m lib389:__init__.py:1173 systemd status -> True [35mDEBUG [0m lib389:__init__.py:1077 systemd status -> True [35mDEBUG [0m lib389:__init__.py:937 open(): Connecting to uri ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:54321/ [35mDEBUG [0m lib389:__init__.py:945 Using 
dirsrv ca certificate /etc/dirsrv/slapd-standalone [35mDEBUG [0m lib389:__init__.py:954 Using external ca certificate /etc/dirsrv/slapd-standalone [35mDEBUG [0m lib389:__init__.py:967 Using external ca certificate /etc/dirsrv/slapd-standalone [35mDEBUG [0m lib389:__init__.py:975 Using certificate policy 1 [35mDEBUG [0m lib389:__init__.py:976 ldap.OPT_X_TLS_REQUIRE_CERT = 1 [35mDEBUG [0m lib389:__init__.py:1030 open(): bound as cn=Directory Manager [35mDEBUG [0m lib389:__init__.py:1692 Retrieving entry with [('',)] [35mDEBUG [0m lib389:__init__.py:1702 Retrieved entry [dn: vendorVersion: 389-Directory/2.0.1 B2020.315.0017 ] [35mDEBUG [0m lib389.remove_ds:remove.py:38 Removing instance standalone [35mDEBUG [0m lib389:__init__.py:1692 Retrieving entry with [('cn=config',)] [35mDEBUG [0m lib389:__init__.py:1702 Retrieved entry [dn: cn=config nsslapd-bakdir: /var/lib/dirsrv/slapd-standalone/bak ] [35mDEBUG [0m lib389:__init__.py:1692 Retrieving entry with [('cn=config',)] [35mDEBUG [0m lib389:__init__.py:1702 Retrieved entry [dn: cn=config nsslapd-certdir: /etc/dirsrv/slapd-standalone ] [35mDEBUG [0m lib389:__init__.py:1692 Retrieving entry with [('cn=config,cn=ldbm database,cn=plugins,cn=config',)] [35mDEBUG [0m lib389:__init__.py:1702 Retrieved entry [dn: cn=config,cn=ldbm database,cn=plugins,cn=config nsslapd-directory: /var/lib/dirsrv/slapd-standalone/db ] [35mDEBUG [0m lib389:__init__.py:1692 Retrieving entry with [('cn=bdb,cn=config,cn=ldbm database,cn=plugins,cn=config',)] [35mDEBUG [0m lib389:__init__.py:1702 Retrieved entry [dn: cn=bdb,cn=config,cn=ldbm database,cn=plugins,cn=config nsslapd-db-home-directory: /var/lib/dirsrv/slapd-standalone/db ] [35mDEBUG [0m lib389:__init__.py:1692 Retrieving entry with [('cn=config,cn=ldbm database,cn=plugins,cn=config',)] [35mDEBUG [0m lib389:__init__.py:1702 Retrieved entry [dn: cn=config,cn=ldbm database,cn=plugins,cn=config nsslapd-directory: /var/lib/dirsrv/slapd-standalone/db ] [35mDEBUG [0m lib389:__init__.py:1692 
Retrieving entry with [('cn=config,cn=ldbm database,cn=plugins,cn=config',)] [35mDEBUG [0m lib389:__init__.py:1702 Retrieved entry [dn: cn=config,cn=ldbm database,cn=plugins,cn=config nsslapd-directory: /var/lib/dirsrv/slapd-standalone/db ] [35mDEBUG [0m lib389:__init__.py:1692 Retrieving entry with [('cn=config',)] [35mDEBUG [0m lib389:__init__.py:1702 Retrieved entry [dn: cn=config nsslapd-ldifdir: /var/lib/dirsrv/slapd-standalone/ldif ] [35mDEBUG [0m lib389:__init__.py:1692 Retrieving entry with [('cn=config',)] [35mDEBUG [0m lib389:__init__.py:1702 Retrieved entry [dn: cn=config nsslapd-lockdir: /var/lock/dirsrv/slapd-standalone ] [35mDEBUG [0m lib389:__init__.py:1692 Retrieving entry with [('cn=config',)] [35mDEBUG [0m lib389:__init__.py:1702 Retrieved entry [dn: cn=config nsslapd-instancedir: /usr/lib64/dirsrv/slapd-standalone ] [35mDEBUG [0m lib389.remove_ds:remove.py:67 Checking for instance marker at /etc/dirsrv/slapd-standalone/dse.ldif [35mDEBUG [0m lib389.remove_ds:remove.py:72 Found instance marker at /etc/dirsrv/slapd-standalone/dse.ldif! Proceeding to remove ... [35mDEBUG [0m lib389.remove_ds:remove.py:76 Stopping instance standalone [35mDEBUG [0m lib389:__init__.py:1173 systemd status -> True [35mDEBUG [0m lib389:__init__.py:1146 systemd status -> True [35mDEBUG [0m lib389.remove_ds:remove.py:79 Found instance marker at /etc/dirsrv/slapd-standalone/dse.ldif! Proceeding to remove ... 
[35mDEBUG [0m lib389.remove_ds:remove.py:83 Stopping instance standalone [35mDEBUG [0m lib389:__init__.py:1173 systemd status -> True [35mDEBUG [0m lib389.remove_ds:remove.py:92 Removing /var/lib/dirsrv/slapd-standalone/bak [35mDEBUG [0m lib389.remove_ds:remove.py:92 Removing /etc/dirsrv/slapd-standalone [35mDEBUG [0m lib389.remove_ds:remove.py:92 Removing /etc/dirsrv/slapd-standalone [35mDEBUG [0m lib389.remove_ds:remove.py:92 Removing /var/lib/dirsrv/slapd-standalone/db [35mDEBUG [0m lib389.remove_ds:remove.py:92 Removing /var/lib/dirsrv/slapd-standalone/db [35mDEBUG [0m lib389.remove_ds:remove.py:92 Removing /var/lib/dirsrv/slapd-standalone/db/../ [35mDEBUG [0m lib389.remove_ds:remove.py:92 Removing /var/lib/dirsrv/slapd-standalone/changelogdb [35mDEBUG [0m lib389.remove_ds:remove.py:92 Removing /var/lib/dirsrv/slapd-standalone/ldif [35mDEBUG [0m lib389.remove_ds:remove.py:92 Removing /var/lock/dirsrv/slapd-standalone [35mDEBUG [0m lib389.remove_ds:remove.py:92 Removing /var/log/dirsrv/slapd-standalone [35mDEBUG [0m lib389.remove_ds:remove.py:92 Removing /usr/lib64/dirsrv/slapd-standalone [35mDEBUG [0m lib389.remove_ds:remove.py:92 Removing /etc/sysconfig/dirsrv-standalone [35mDEBUG [0m lib389.remove_ds:remove.py:101 Removing the systemd symlink [35mDEBUG [0m lib389.remove_ds:remove.py:108 CMD: systemctl disable dirsrv@standalone ; STDOUT: ; STDERR: Removed /etc/systemd/system/multi-user.target.wants/dirsrv@standalone.service. [35mDEBUG [0m lib389.remove_ds:remove.py:110 Removing /etc/tmpfiles.d/dirsrv-standalone.conf [35mDEBUG [0m lib389.remove_ds:remove.py:119 Removing the port labels [35mDEBUG [0m lib389.remove_ds:remove.py:149 Moving /etc/dirsrv/slapd-standalone to /etc/dirsrv/slapd-standalone.removed [35mDEBUG [0m lib389.remove_ds:remove.py:159 Complete | |||
Passed | suites/setup_ds/dscreate_test.py::test_setup_ds_inf_minimal | 0.00 | |
-------------------------------Captured log setup------------------------------- [35mDEBUG [0m lib389:dscreate_test.py:42 Instance allocated [35mDEBUG [0m lib389:__init__.py:554 Allocate <class 'lib389.DirSrv'> with None [35mDEBUG [0m lib389:__init__.py:577 Allocate <class 'lib389.DirSrv'> with ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:54321 [35mDEBUG [0m lib389:__init__.py:602 Allocate <class 'lib389.DirSrv'> with ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:54321 [35mDEBUG [0m lib389:__init__.py:742 list instance not found in /etc/dirsrv/slapd-standalone/dse.ldif: standalone | |||
Passed | suites/setup_ds/remove_test.py::test_basic[True] | 4.55 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/setup_ds/remove_test.py::test_basic[False] | 5.13 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/state/mmt_state_test.py::test_check_desc_attr_state[description-Test1usr1-ldap.MOD_ADD-exp_values0-vucsn] | 0.09 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 9b22744f-eb06-4fae-8546-509ef2b8a3f8 / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 
Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 6e83ee4c-4407-4369-8aca-9c6ba14be6e3 / got description=9b22744f-eb06-4fae-8546-509ef2b8a3f8) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:99 Add user: state1test [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:107 Check if list of description attrs present for: state1test [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:110 Checking for operational attributes [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: state1test | |||
Passed | suites/state/mmt_state_test.py::test_check_desc_attr_state[description-Test1usr2-ldap.MOD_ADD-exp_values1-vucsn] | 0.02 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:99 Add user: state1test [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:107 Check if list of description attrs present for: state1test [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:110 Checking for operational attributes [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: state1test | |||
Passed | suites/state/mmt_state_test.py::test_check_desc_attr_state[description-Test1usr3-ldap.MOD_ADD-exp_values2-vucsn] | 0.02 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:99 Add user: state1test [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:107 Check if list of description attrs present for: state1test [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:110 Checking for operational attributes [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: state1test | |||
Passed | suites/state/mmt_state_test.py::test_check_desc_attr_state[description-Test1usr4-ldap.MOD_REPLACE-exp_values3-adcsn] | 0.02 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:99 Add user: state1test [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:107 Check if list of description attrs present for: state1test [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:110 Checking for operational attributes [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: state1test | |||
Passed | suites/state/mmt_state_test.py::test_check_desc_attr_state[description-Test1usr4-ldap.MOD_DELETE-exp_values4-vdcsn] | 0.03 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:99 Add user: state1test [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:107 Check if list of description attrs present for: state1test [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:110 Checking for operational attributes [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: state1test | |||
Passed | suites/state/mmt_state_test.py::test_check_cn_attr_state[cn-TestCN1-ldap.MOD_ADD-exp_values0-vucsn] | 0.07 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:153 Add user: TestCNusr1 [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:166 Check if list of cn attrs present for: TestCNusr1 [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:168 Checking for operational attributes [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: TestCNusr1 | |||
Passed | suites/state/mmt_state_test.py::test_check_cn_attr_state[cn-TestCN2-ldap.MOD_ADD-exp_values1-vucsn] | 0.02 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:153 Add user: TestCNusr1 [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:166 Check if list of cn attrs present for: TestCNusr1 [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:168 Checking for operational attributes [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: TestCNusr1 | |||
Passed | suites/state/mmt_state_test.py::test_check_cn_attr_state[cn-TestnewCN3-ldap.MOD_REPLACE-exp_values2-adcsn] | 0.28 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:153 Add user: TestCNusr1 [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:166 Check if list of cn attrs present for: TestCNusr1 [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:168 Checking for operational attributes [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: TestCNusr1 | |||
Passed | suites/state/mmt_state_test.py::test_check_cn_attr_state[cn-TestnewCN3-ldap.MOD_DELETE-None-None] | 0.05 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:153 Add user: TestCNusr1 [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:168 Checking for operational attributes [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: TestCNusr1 | |||
Passed | suites/state/mmt_state_test.py::test_check_single_value_attr_state[preferredlanguage-Chinese-ldap.MOD_REPLACE-exp_values0-vucsn] | 0.07 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:206 Add user: Langusr1 [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:219 Check if list of cn attrs present for: Langusr1 [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:221 Checking for operational attributes [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: Langusr1 | |||
Passed | suites/state/mmt_state_test.py::test_check_single_value_attr_state[preferredlanguage-French-ldap.MOD_ADD-None-None] | 0.05 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:206 Add user: Langusr1 [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:221 Checking for operational attributes [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: Langusr1 | |||
Passed | suites/state/mmt_state_test.py::test_check_single_value_attr_state[preferredlanguage-German-ldap.MOD_REPLACE-exp_values2-adcsn] | 0.02 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:206 Add user: Langusr1 [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:219 Check if list of cn attrs present for: Langusr1 [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:221 Checking for operational attributes [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: Langusr1 | |||
Passed | suites/state/mmt_state_test.py::test_check_single_value_attr_state[preferredlanguage-German-ldap.MOD_DELETE-exp_values3-vdcsn] | 0.08 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:206 Add user: Langusr1 [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:219 Check if list of cn attrs present for: Langusr1 [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:221 Checking for operational attributes [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: Langusr1 | |||
Passed | suites/state/mmt_state_test.py::test_check_subtype_attr_state[roomnumber;office-Tower1-ldap.MOD_ADD-exp_values0-vucsn] | 0.10 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:276 Add user: roomoffice1usr [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:285 Check if list of roomnumber;office attributes are present for a given entry [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:287 Checking if operational attributes are present for cn [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: roomoffice1usr | |||
Passed | suites/state/mmt_state_test.py::test_check_subtype_attr_state[roomnumber;office-Tower2-ldap.MOD_ADD-exp_values1-vucsn] | 0.03 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:276 Add user: roomoffice1usr [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:285 Check if list of roomnumber;office attributes are present for a given entry [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:287 Checking if operational attributes are present for cn [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: roomoffice1usr | |||
Passed | suites/state/mmt_state_test.py::test_check_subtype_attr_state[roomnumber;office-Tower3-ldap.MOD_ADD-exp_values2-vucsn] | 0.03 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:276 Add user: roomoffice1usr [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:285 Check if list of roomnumber;office attributes are present for a given entry [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:287 Checking if operational attributes are present for cn [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: roomoffice1usr | |||
Passed | suites/state/mmt_state_test.py::test_check_subtype_attr_state[roomnumber;office-Tower4-ldap.MOD_REPLACE-exp_values3-adcsn] | 0.02 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:276 Add user: roomoffice1usr [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:285 Check if list of roomnumber;office attributes are present for a given entry [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:287 Checking if operational attributes are present for cn [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: roomoffice1usr | |||
Passed | suites/state/mmt_state_test.py::test_check_subtype_attr_state[roomnumber;office-Tower4-ldap.MOD_DELETE-exp_values4-vucsn] | 0.03 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:276 Add user: roomoffice1usr [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:285 Check if list of roomnumber;office attributes are present for a given entry [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:287 Checking if operational attributes are present for cn [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: roomoffice1usr | |||
Passed | suites/state/mmt_state_test.py::test_check_jpeg_attr_state[jpegphoto-thedeadbeef1-ldap.MOD_ADD-exp_values0-vucsn] | 0.07 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:342 Add user: testJpeg1usr [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:351 Check if list of jpeg attributes are present for a given entry [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:353 Checking if operational attributes are present for cn [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: testJpeg1usr | |||
Passed | suites/state/mmt_state_test.py::test_check_jpeg_attr_state[jpegphoto-thedeadbeef2-ldap.MOD_ADD-exp_values1-vucsn] | 0.03 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:342 Add user: testJpeg1usr [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:351 Check if list of jpeg attributes are present for a given entry [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:353 Checking if operational attributes are present for cn [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: testJpeg1usr | |||
Passed | suites/state/mmt_state_test.py::test_check_jpeg_attr_state[jpegphoto-thedeadbeef3-ldap.MOD_ADD-exp_values2-vucsn] | 0.03 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:342 Add user: testJpeg1usr [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:351 Check if list of jpeg attributes are present for a given entry [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:353 Checking if operational attributes are present for cn [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: testJpeg1usr | |||
Passed | suites/state/mmt_state_test.py::test_check_jpeg_attr_state[jpegphoto-thedeadbeef2-ldap.MOD_REPLACE-exp_values3-adcsn] | 0.04 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:342 Add user: testJpeg1usr [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:351 Check if list of jpeg attributes are present for a given entry [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:353 Checking if operational attributes are present for cn [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: testJpeg1usr | |||
Passed | suites/state/mmt_state_test.py::test_check_jpeg_attr_state[jpegphoto-thedeadbeef2-ldap.MOD_DELETE-exp_values4-vdcsn] | 0.03 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:342 Add user: testJpeg1usr [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:351 Check if list of jpeg attributes are present for a given entry [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:353 Checking if operational attributes are present for cn [32mINFO [0m tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: testJpeg1usr | |||
Passed | suites/syncrepl_plugin/basic_test.py::test_syncrepl_basic | 5.54 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/syncrepl_plugin/basic_test.py::test_sync_repl_mep | 26.12 | |
------------------------------Captured stdout call------------------------------ syncrepl_poll: LDAP error (%s) {'result': -1, 'desc': "Can't contact LDAP server", 'ctrls': []} | |||
Passed | suites/syncrepl_plugin/openldap_test.py::test_syncrepl_openldap | 5.62 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/syntax/acceptance_test.py::test_valid | 4.27 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.topologies.tests.suites.syntax.acceptance_test:acceptance_test.py:54 Clean the error log [32mINFO [0m lib389.topologies.tests.suites.syntax.acceptance_test:acceptance_test.py:58 Attempting to add task entry... [32mINFO [0m lib389.topologies.tests.suites.syntax.acceptance_test:acceptance_test.py:65 Found 0 invalid entries - Success | |||
Passed | suites/syntax/acceptance_test.py::test_invalid_uidnumber | 6.00 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.topologies.tests.suites.syntax.acceptance_test:acceptance_test.py:91 Clean the error log [32mINFO [0m lib389.topologies.tests.suites.syntax.acceptance_test:acceptance_test.py:98 Attempting to add task entry... [32mINFO [0m lib389.topologies.tests.suites.syntax.acceptance_test:acceptance_test.py:105 Found an invalid entry with wrong uidNumber - Success | |||
Passed | suites/syntax/mr_test.py::test_sss_mr | 7.16 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.syntax.mr_test:mr_test.py:42 Creating LDIF... [32mINFO [0m tests.suites.syntax.mr_test:mr_test.py:47 Importing LDIF... [32mINFO [0m tests.suites.syntax.mr_test:mr_test.py:52 Search using server side sorting using undefined mr in the attr... [32mINFO [0m tests.suites.syntax.mr_test:mr_test.py:62 Test PASSED | |||
Passed | suites/tls/cipher_test.py::test_long_cipher_list | 18.78 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/tls/ssl_version_test.py::test_ssl_version_range | 73.17 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.tls.ssl_version_test:ssl_version_test.py:36 default min: TLS1.2 max: TLS1.3 | |||
Passed | suites/tls/tls_cert_namespace_test.py::test_pem_cert_in_private_namespace | 11.67 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.tls.tls_cert_namespace_test:tls_cert_namespace_test.py:54 Enable TLS [32mINFO [0m tests.suites.tls.tls_cert_namespace_test:tls_cert_namespace_test.py:57 Checking PrivateTmp value [32mINFO [0m tests.suites.tls.tls_cert_namespace_test:tls_cert_namespace_test.py:59 Command used : systemctl show -p PrivateTmp dirsrv@standalone1.service [32mINFO [0m tests.suites.tls.tls_cert_namespace_test:tls_cert_namespace_test.py:63 Check files in private /tmp [32mINFO [0m tests.suites.tls.tls_cert_namespace_test:tls_cert_namespace_test.py:68 Check that Self-Signed-CA.pem is present in private /tmp [32mINFO [0m tests.suites.tls.tls_cert_namespace_test:tls_cert_namespace_test.py:68 Check that Server-Cert-Key.pem is present in private /tmp [32mINFO [0m tests.suites.tls.tls_cert_namespace_test:tls_cert_namespace_test.py:68 Check that Server-Cert.pem is present in private /tmp [32mINFO [0m tests.suites.tls.tls_cert_namespace_test:tls_cert_namespace_test.py:71 Check instance cert directory [32mINFO [0m tests.suites.tls.tls_cert_namespace_test:tls_cert_namespace_test.py:75 Check that Self-Signed-CA.pem is not present in /etc/dirsrv/slapd-standalone1/ directory [32mINFO [0m tests.suites.tls.tls_cert_namespace_test:tls_cert_namespace_test.py:75 Check that Server-Cert-Key.pem is not present in /etc/dirsrv/slapd-standalone1/ directory [32mINFO [0m tests.suites.tls.tls_cert_namespace_test:tls_cert_namespace_test.py:75 Check that Server-Cert.pem is not present in 
/etc/dirsrv/slapd-standalone1/ directory | |||
Passed | suites/tls/tls_cert_namespace_test.py::test_cert_category_authority | 12.82 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.suites.tls.tls_cert_namespace_test:tls_cert_namespace_test.py:106 Enable TLS [32mINFO [0m tests.suites.tls.tls_cert_namespace_test:tls_cert_namespace_test.py:109 Get certificate path [32mINFO [0m tests.suites.tls.tls_cert_namespace_test:tls_cert_namespace_test.py:115 Check that Self-Signed-CA.pem is present [32mINFO [0m tests.suites.tls.tls_cert_namespace_test:tls_cert_namespace_test.py:119 Trust the certificate [32mINFO [0m tests.suites.tls.tls_cert_namespace_test:tls_cert_namespace_test.py:122 Search if our certificate has category: authority | |||
Passed | suites/tls/tls_check_crl_test.py::test_tls_check_crl | 12.19 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/tls/tls_ldaps_only_test.py::test_tls_ldaps_only | 25.50 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | tickets/ticket47560_test.py::test_ticket47560 | 18.41 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:tasks.py:877 fixupMemberOf task fixupmemberof_11092020_232733 for basedn dc=example,dc=com completed successfully | |||
Passed | tickets/ticket47573_test.py::test_ticket47573_init | 0.11 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for consumer1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39201, 'ldap-secureport': 63901, 'server-id': 'consumer1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:169 Joining consumer consumer1 from master1 ... [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect b6a25e14-8483-46db-b3e4-42cc758f3bf7 / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is working [32mINFO [0m lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 [32mINFO [0m 
lib389.topologies:topologies.py:174 Ensuring consumer consumer1 from master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 already exists -------------------------------Captured log call-------------------------------- [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:123 test_ticket47573_init topology_m1c1 <lib389.topologies.TopologyMain object at 0x7faa45fa28e0> (master <lib389.DirSrv object at 0x7faa54bcd220>, consumer <lib389.DirSrv object at 0x7faa54bcd910> | |||
Passed | tickets/ticket47573_test.py::test_ticket47573_one | 1.28 | |
-------------------------------Captured log call-------------------------------- [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:147 test_ticket47573_one topology_m1c1 <lib389.topologies.TopologyMain object at 0x7faa45fa28e0> (master <lib389.DirSrv object at 0x7faa54bcd220>, consumer <lib389.DirSrv object at 0x7faa54bcd910> [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:113 trigger_schema_push: receive 0 (expected 1) [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:161 test_ticket47573_one master_schema_csn=b'5faa16e2000000000000' [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:162 ctest_ticket47573_one onsumer_schema_csn=b'5faa16e2000000000000' [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:47 _pattern_errorlog: start at offset 0 [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [36] 389-Directory/2.0.1 B2020.315.0017 [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [93] localhost.localdomain:39001 (/etc/dirsrv/slapd-master1) [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [94] [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [196] [09/Nov/2020:23:27:50.917289312 -0500] - INFO - main - 389-Directory/2.0.1 B2020.315.0017 starting up [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [304] [09/Nov/2020:23:27:50.922351758 -0500] - INFO - main - Setting the maximum file descriptor limit to: 524288 [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [396] [09/Nov/2020:23:27:50.937086948 -0500] - ERR - allow_operation - Component identity is NULL [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [504] [09/Nov/2020:23:27:52.139421528 -0500] - INFO - PBKDF2_SHA256 - Based on CPU performance, chose 2048 rounds [35mDEBUG [0m 
tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [618] [09/Nov/2020:23:27:52.157364847 -0500] - INFO - bdb_config_upgrade_dse_info - create config entry from old config [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [720] [09/Nov/2020:23:27:52.173052059 -0500] - NOTICE - bdb_start_autotune - found 7980860k physical memory [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [816] [09/Nov/2020:23:27:52.186552122 -0500] - NOTICE - bdb_start_autotune - found 7304148k available [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [923] [09/Nov/2020:23:27:52.191509279 -0500] - NOTICE - bdb_start_autotune - cache autosizing: db cache: 498803k [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [1026] [09/Nov/2020:23:27:52.195739878 -0500] - NOTICE - bdb_start_autotune - total cache size: 408620032 B; [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [1162] [09/Nov/2020:23:27:52.532109071 -0500] - INFO - slapd_daemon - slapd started. 
Listening on All Interfaces port 39001 for LDAP requests [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [1287] [09/Nov/2020:23:27:52.538353533 -0500] - INFO - slapd_daemon - Listening on /var/run/slapd-master1.socket for LDAPI requests [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [1446] [09/Nov/2020:23:27:52.822540712 -0500] - INFO - postop_modify_config_dse - The change of nsslapd-securePort will not take effect until the server is restarted [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [1563] [09/Nov/2020:23:27:52.835957081 -0500] - INFO - ldbm_instance_config_cachememsize_set - force a minimal value 512000 [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [1741] [09/Nov/2020:23:27:53.669746860 -0500] - INFO - op_thread_cleanup - slapd shutting down - signaling operation threads - op stack size 1 max work q size 1 max work q stack size 1 [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [1862] [09/Nov/2020:23:27:53.680889083 -0500] - INFO - slapd_daemon - slapd shutting down - waiting for 10 threads to terminate [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [1992] [09/Nov/2020:23:27:53.686907802 -0500] - INFO - slapd_daemon - slapd shutting down - closing down internal subsystems and plugins [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [2095] [09/Nov/2020:23:27:53.696383183 -0500] - INFO - bdb_pre_close - Waiting for 4 database threads to stop [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [2192] [09/Nov/2020:23:27:54.940447171 -0500] - INFO - bdb_pre_close - All database threads now stopped [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [2303] [09/Nov/2020:23:27:54.997797960 -0500] - INFO - 
ldbm_back_instance_set_destructor - Set of instances destroyed [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [2464] [09/Nov/2020:23:27:55.003497529 -0500] - INFO - connection_post_shutdown_cleanup - slapd shutting down - freed 1 work q stack objects - freed 1 op stack objects [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [2534] [09/Nov/2020:23:27:55.015373964 -0500] - INFO - main - slapd stopped. [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [2636] [09/Nov/2020:23:27:56.434762455 -0500] - INFO - main - 389-Directory/2.0.1 B2020.315.0017 starting up [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [2744] [09/Nov/2020:23:27:56.445399080 -0500] - INFO - main - Setting the maximum file descriptor limit to: 524288 [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [2836] [09/Nov/2020:23:27:56.480908107 -0500] - ERR - allow_operation - Component identity is NULL [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [2944] [09/Nov/2020:23:27:57.571473681 -0500] - INFO - PBKDF2_SHA256 - Based on CPU performance, chose 2048 rounds [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [3061] [09/Nov/2020:23:27:57.579089239 -0500] - INFO - ldbm_instance_config_cachememsize_set - force a minimal value 512000 [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [3163] [09/Nov/2020:23:27:57.589046019 -0500] - NOTICE - bdb_start_autotune - found 7980860k physical memory [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [3259] [09/Nov/2020:23:27:57.596588901 -0500] - NOTICE - bdb_start_autotune - found 7303716k available [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [3366] [09/Nov/2020:23:27:57.600244109 -0500] - 
NOTICE - bdb_start_autotune - cache autosizing: db cache: 498803k [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [3496] [09/Nov/2020:23:27:57.604219553 -0500] - NOTICE - bdb_start_autotune - cache autosizing: userRoot entry cache (1 total): 1376256k [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [3622] [09/Nov/2020:23:27:57.608906668 -0500] - NOTICE - bdb_start_autotune - cache autosizing: userRoot dn cache (1 total): 196608k [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [3726] [09/Nov/2020:23:27:57.612596688 -0500] - NOTICE - bdb_start_autotune - total cache size: 1834683392 B; [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [3862] [09/Nov/2020:23:27:57.740106877 -0500] - INFO - slapd_daemon - slapd started. Listening on All Interfaces port 39001 for LDAP requests [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [3987] [09/Nov/2020:23:27:57.747690293 -0500] - INFO - slapd_daemon - Listening on /var/run/slapd-master1.socket for LDAPI requests [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [4175] [09/Nov/2020:23:28:10.267298007 -0500] - NOTICE - NSMMReplicationPlugin - changelog program - _cl5ConstructRUV - Rebuilding the replication changelog RUV, this may take several minutes... [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [4355] [09/Nov/2020:23:28:10.272618288 -0500] - NOTICE - NSMMReplicationPlugin - changelog program - _cl5ConstructRUV - Rebuilding replication changelog RUV complete. Result 0 (Success) [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [4543] [09/Nov/2020:23:28:10.276486469 -0500] - NOTICE - NSMMReplicationPlugin - changelog program - _cl5ConstructRUV - Rebuilding the replication changelog RUV, this may take several minutes... 
[35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [4723] [09/Nov/2020:23:28:10.282070840 -0500] - NOTICE - NSMMReplicationPlugin - changelog program - _cl5ConstructRUV - Rebuilding replication changelog RUV complete. Result 0 (Success) [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [4893] [09/Nov/2020:23:28:10.861983068 -0500] - INFO - NSMMReplicationPlugin - repl5_tot_run - Beginning total update of replica "agmt="cn=temp_201" (ci-vm-10-0-139-53:39201)". [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [5077] [09/Nov/2020:23:28:10.867697152 -0500] - NOTICE - NSMMReplicationPlugin - replica_subentry_check - Need to create replication keep alive entry <cn=repl keep alive 1,dc=example,dc=com> [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [5222] [09/Nov/2020:23:28:10.871772849 -0500] - INFO - NSMMReplicationPlugin - replica_subentry_create - add dn: cn=repl keep alive 1,dc=example,dc=com [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [5239] objectclass: top [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [5265] objectclass: ldapsubentry [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [5295] objectclass: extensibleObject [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [5317] cn: repl keep alive 1 [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [5503] [09/Nov/2020:23:28:13.967240726 -0500] - INFO - NSMMReplicationPlugin - repl5_tot_run - Finished total update of replica "agmt="cn=temp_201" (ci-vm-10-0-139-53:39201)". Sent 16 entries. 
[35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [5503] [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:59 _pattern_errorlog: end at offset 5503 | |||
Passed | tickets/ticket47573_test.py::test_ticket47573_two | 1.37 | |
-------------------------------Captured log call-------------------------------- [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:113 trigger_schema_push: receive b'1' (expected 2) [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:192 test_ticket47573_two master_schema_csn=b'5faa16e3000000000000' [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:193 test_ticket47573_two consumer_schema_csn=b'5faa16e3000000000000' [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:47 _pattern_errorlog: start at offset 5504 [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [5504] [35mDEBUG [0m tests.tickets.ticket47573_test:ticket47573_test.py:59 _pattern_errorlog: end at offset 5504 | |||
Passed | tickets/ticket47573_test.py::test_ticket47573_three | 1.02 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket47573_test:ticket47573_test.py:228 Testcase PASSED | |||
Passed | tickets/ticket47619_test.py::test_ticket47619_init | 7.20 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for consumer1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39201, 'ldap-secureport': 63901, 'server-id': 'consumer1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:169 Joining consumer consumer1 from master1 ... [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect beb7df20-9b45-4737-95d0-dd298db13669 / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is working [32mINFO [0m lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 [32mINFO [0m 
lib389.topologies:topologies.py:174 Ensuring consumer consumer1 from master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 already exists -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47619_test.py:48 test_ticket47619_init topology_m1c1 <lib389.topologies.TopologyMain object at 0x7faa45fb4f10> [32mINFO [0m lib389:ticket47619_test.py:61 test_ticket47619_init: 100 entries ADDed other_entry[0..99] | |||
Passed | tickets/ticket47619_test.py::test_ticket47619_create_index | 5.43 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:backend.py:80 List backend with suffix=cn=changelog [32mINFO [0m lib389:backend.py:80 List backend with suffix=cn=changelog [32mINFO [0m lib389:backend.py:80 List backend with suffix=cn=changelog [32mINFO [0m lib389:backend.py:80 List backend with suffix=cn=changelog [32mINFO [0m lib389:backend.py:80 List backend with suffix=cn=changelog [32mINFO [0m lib389:backend.py:80 List backend with suffix=cn=changelog [32mINFO [0m lib389:backend.py:80 List backend with suffix=cn=changelog [32mINFO [0m lib389:backend.py:80 List backend with suffix=cn=changelog | |||
Passed | tickets/ticket47619_test.py::test_ticket47619_reindex | 15.27 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:tasks.py:798 Index task index_attrs_11092020_232911 completed successfully [32mINFO [0m lib389:tasks.py:798 Index task index_attrs_11092020_232913 completed successfully [32mINFO [0m lib389:tasks.py:798 Index task index_attrs_11092020_232915 completed successfully [32mINFO [0m lib389:tasks.py:798 Index task index_attrs_11092020_232917 completed successfully [32mINFO [0m lib389:tasks.py:798 Index task index_attrs_11092020_232919 completed successfully [32mINFO [0m lib389:tasks.py:798 Index task index_attrs_11092020_232920 completed successfully [32mINFO [0m lib389:tasks.py:798 Index task index_attrs_11092020_232922 completed successfully [32mINFO [0m lib389:tasks.py:798 Index task index_attrs_11092020_232924 completed successfully | |||
Passed | tickets/ticket47619_test.py::test_ticket47619_check_indexed_search | 0.28 | |
No log output captured. | |||
Passed | tickets/ticket47640_test.py::test_ticket47640 | 0.38 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket47640_test:ticket47640_test.py:65 Add operation correctly rejected. [32mINFO [0m tests.tickets.ticket47640_test:ticket47640_test.py:75 Test complete | |||
Passed | tickets/ticket47653MMR_test.py::test_ticket47653_init | 0.39 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0b87af4c-d6e5-41a5-a313-385b1fefc94b / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 
Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect e7fe50c3-f42f-46f8-b5ed-8f6f985e1568 / got description=0b87af4c-d6e5-41a5-a313-385b1fefc94b) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47653MMR_test.py:72 Add OCticket47653 that allows 'member' attribute [32mINFO [0m lib389:ticket47653MMR_test.py:77 Add cn=bind_entry, dc=example,dc=com | |||
Passed | tickets/ticket47653MMR_test.py::test_ticket47653_add | 5.30 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47653MMR_test.py:114 ######################### ADD ###################### [32mINFO [0m lib389:ticket47653MMR_test.py:117 Bind as cn=bind_entry, dc=example,dc=com [32mINFO [0m lib389:ticket47653MMR_test.py:147 Try to add Add cn=test_entry, dc=example,dc=com (aci is missing): dn: cn=test_entry, dc=example,dc=com cn: test_entry member: cn=bind_entry, dc=example,dc=com objectclass: top objectclass: person objectclass: OCticket47653 postalAddress: here postalCode: 1234 sn: test_entry [32mINFO [0m lib389:ticket47653MMR_test.py:151 Exception (expected): INSUFFICIENT_ACCESS [32mINFO [0m lib389:ticket47653MMR_test.py:155 Bind as cn=Directory Manager and add the ADD SELFDN aci [32mINFO [0m lib389:ticket47653MMR_test.py:168 Bind as cn=bind_entry, dc=example,dc=com [32mINFO [0m lib389:ticket47653MMR_test.py:173 Try to add Add cn=test_entry, dc=example,dc=com (member is missing) [32mINFO [0m lib389:ticket47653MMR_test.py:181 Exception (expected): INSUFFICIENT_ACCESS [32mINFO [0m lib389:ticket47653MMR_test.py:188 Try to add Add cn=test_entry, dc=example,dc=com (with several member values) [32mINFO [0m lib389:ticket47653MMR_test.py:191 Exception (expected): INSUFFICIENT_ACCESS [32mINFO [0m lib389:ticket47653MMR_test.py:195 Try to add Add cn=test_entry, dc=example,dc=com should be successful [32mINFO [0m lib389:ticket47653MMR_test.py:206 Try to retrieve cn=test_entry, dc=example,dc=com from Master2 [32mINFO [0m lib389:ticket47653MMR_test.py:218 Update cn=test_entry, dc=example,dc=com on M2 | |||
Passed | tickets/ticket47653MMR_test.py::test_ticket47653_modify | 4.31 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47653MMR_test.py:248 Bind as cn=bind_entry, dc=example,dc=com [32mINFO [0m lib389:ticket47653MMR_test.py:251 ######################### MODIFY ###################### [32mINFO [0m lib389:ticket47653MMR_test.py:255 Try to modify cn=test_entry, dc=example,dc=com (aci is missing) [32mINFO [0m lib389:ticket47653MMR_test.py:259 Exception (expected): INSUFFICIENT_ACCESS [32mINFO [0m lib389:ticket47653MMR_test.py:263 Bind as cn=Directory Manager and add the WRITE SELFDN aci [32mINFO [0m lib389:ticket47653MMR_test.py:277 M1: Bind as cn=bind_entry, dc=example,dc=com [32mINFO [0m lib389:ticket47653MMR_test.py:282 M1: Try to modify cn=test_entry, dc=example,dc=com. It should succeeds [32mINFO [0m lib389:ticket47653MMR_test.py:286 M1: Bind as cn=Directory Manager [32mINFO [0m lib389:ticket47653MMR_test.py:289 M1: Check the update of cn=test_entry, dc=example,dc=com [32mINFO [0m lib389:ticket47653MMR_test.py:295 M2: Bind as cn=Directory Manager [32mINFO [0m lib389:ticket47653MMR_test.py:297 M2: Try to retrieve cn=test_entry, dc=example,dc=com [32mINFO [0m lib389:ticket47653MMR_test.py:311 M2: Update cn=test_entry, dc=example,dc=com (bound as cn=bind_entry, dc=example,dc=com) [32mINFO [0m lib389:ticket47653MMR_test.py:329 M1: Bind as cn=Directory Manager [32mINFO [0m lib389:ticket47653MMR_test.py:331 M1: Check cn=test_entry, dc=example,dc=com.postalCode=1929) | |||
Passed | tickets/ticket47676_test.py::test_ticket47676_init | 0.91 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect eb40a57a-e431-4023-bb74-3baac0473966 / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 
Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect da498bba-f774-4e18-8979-8dd453202b79 / got description=eb40a57a-e431-4023-bb74-3baac0473966) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47676_test.py:88 Add OCticket47676 that allows 'member' attribute [32mINFO [0m lib389:ticket47676_test.py:93 Add cn=bind_entry, dc=example,dc=com | |||
Passed | tickets/ticket47676_test.py::test_ticket47676_skip_oc_at | 4.77 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47676_test.py:120 ######################### ADD ###################### [32mINFO [0m lib389:ticket47676_test.py:123 Bind as cn=Directory Manager and add the add the entry with specific oc [32mINFO [0m lib389:ticket47676_test.py:140 Try to add Add cn=test_entry, dc=example,dc=com should be successful [32mINFO [0m lib389:ticket47676_test.py:147 Try to retrieve cn=test_entry, dc=example,dc=com from Master2 [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 1c74ed3b-7222-44b0-b794-dbfcf64a17bc / got description=da498bba-f774-4e18-8979-8dd453202b79) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389:ticket47676_test.py:152 Update cn=test_entry, dc=example,dc=com on M2 [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect b8e64a3e-b595-4258-83f2-faf1bad84a09 / got description=1c74ed3b-7222-44b0-b794-dbfcf64a17bc) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working | |||
Passed | tickets/ticket47676_test.py::test_ticket47676_reject_action | 12.76 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47676_test.py:163 ######################### REJECT ACTION ###################### [32mINFO [0m lib389:ticket47676_test.py:177 Add OC2ticket47676 on M1 [32mINFO [0m lib389:ticket47676_test.py:182 Check OC2ticket47676 is in M1 [32mINFO [0m lib389:ticket47676_test.py:193 Update cn=test_entry, dc=example,dc=com on M1 [32mINFO [0m lib389:ticket47676_test.py:198 Check updated cn=test_entry, dc=example,dc=com on M2 [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 5abf5468-3f83-465b-9819-1d16b18f177c / got description=b8e64a3e-b595-4258-83f2-faf1bad84a09) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389:ticket47676_test.py:205 Check OC2ticket47676 is not in M2 [32mINFO [0m lib389:ticket47676_test.py:215 ######################### NO MORE REJECT ACTION ###################### [32mINFO [0m lib389:ticket47676_test.py:226 Update cn=test_entry, dc=example,dc=com on M1 [32mINFO [0m lib389:ticket47676_test.py:231 Check updated cn=test_entry, dc=example,dc=com on M2 [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect ec7d8e3e-0eaf-4052-b5c0-10ec3c429a53 / got description=5abf5468-3f83-465b-9819-1d16b18f177c) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389:ticket47676_test.py:237 Check 
OC2ticket47676 is in M2 | |||
Passed | tickets/ticket47714_test.py::test_ticket47714_init | 0.10 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47714_test.py:38 ############################################### [32mINFO [0m lib389:ticket47714_test.py:39 ####### [32mINFO [0m lib389:ticket47714_test.py:40 ####### Testing Ticket 47714 - [RFE] Update lastLoginTime also in Account Policy plugin if account lockout is based on passwordExpirationTime. [32mINFO [0m lib389:ticket47714_test.py:41 ####### [32mINFO [0m lib389:ticket47714_test.py:42 ############################################### [32mINFO [0m lib389.utils:ticket47714_test.py:55 ######################### Adding Account Policy entry: cn=Account Inactivation Policy,dc=example,dc=com ###################### [32mINFO [0m lib389.utils:ticket47714_test.py:60 ######################### Adding Test User entry: uid=ticket47714user,dc=example,dc=com ###################### | |||
Passed | tickets/ticket47714_test.py::test_ticket47714_run_0 | 11.05 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47714_test.py:38 ############################################### [32mINFO [0m lib389:ticket47714_test.py:39 ####### [32mINFO [0m lib389:ticket47714_test.py:40 ####### Account Policy - No new attr alwaysRecordLoginAttr in config [32mINFO [0m lib389:ticket47714_test.py:41 ####### [32mINFO [0m lib389:ticket47714_test.py:42 ############################################### [32mINFO [0m lib389.utils:ticket47714_test.py:96 ######################### Bind as uid=ticket47714user,dc=example,dc=com ###################### [32mINFO [0m lib389.utils:ticket47714_test.py:109 ######################### Bind as uid=ticket47714user,dc=example,dc=com again ###################### [32mINFO [0m lib389.utils:ticket47714_test.py:122 First lastLoginTime: b'20201110043146Z', Second lastLoginTime: b'20201110043148Z' [32mINFO [0m lib389.utils:ticket47714_test.py:133 ######################### cn=config,cn=Account Policy Plugin,cn=plugins,cn=config ###################### [32mINFO [0m lib389.utils:ticket47714_test.py:134 accountInactivityLimit: b'1' [32mINFO [0m lib389.utils:ticket47714_test.py:135 ######################### cn=config,cn=Account Policy Plugin,cn=plugins,cn=config DONE ###################### [32mINFO [0m lib389.utils:ticket47714_test.py:137 ######################### Bind as uid=ticket47714user,dc=example,dc=com again to fail ###################### [32mINFO [0m lib389.utils:ticket47714_test.py:141 CONSTRAINT VIOLATION Constraint violation [32mINFO [0m lib389.utils:ticket47714_test.py:142 uid=ticket47714user,dc=example,dc=com was successfully inactivated. | |||
Passed | tickets/ticket47714_test.py::test_ticket47714_run_1 | 6.14 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47714_test.py:38 ############################################### [32mINFO [0m lib389:ticket47714_test.py:39 ####### [32mINFO [0m lib389:ticket47714_test.py:40 ####### Account Policy - With new attr alwaysRecordLoginAttr in config [32mINFO [0m lib389:ticket47714_test.py:41 ####### [32mINFO [0m lib389:ticket47714_test.py:42 ############################################### [32mINFO [0m lib389.utils:ticket47714_test.py:179 ######################### Bind as uid=ticket47714user,dc=example,dc=com ###################### [32mINFO [0m lib389.utils:ticket47714_test.py:191 ######################### Bind as uid=ticket47714user,dc=example,dc=com again ###################### [32mINFO [0m lib389.utils:ticket47714_test.py:203 First lastLoginTime: b'20201110043156Z', Second lastLoginTime: b'20201110043157Z' [32mINFO [0m lib389:ticket47714_test.py:206 ticket47714 was successfully verified. | |||
Passed | tickets/ticket47721_test.py::test_ticket47721_init | 1.09 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect e8f39a2f-ec67-4e67-ad12-46dff59a8eb5 / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 
Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect ec61138f-aade-401e-8ac5-c49c49e9bc04 / got description=e8f39a2f-ec67-4e67-ad12-46dff59a8eb5) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47721_test.py:98 Add cn=bind_entry, dc=example,dc=com | |||
Passed | tickets/ticket47721_test.py::test_ticket47721_0 | 2.24 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 03268bbf-6083-474b-9f69-591294ec5880 / got description=ec61138f-aade-401e-8ac5-c49c49e9bc04) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 03268bbf-6083-474b-9f69-591294ec5880 / got description=ec61138f-aade-401e-8ac5-c49c49e9bc04) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working | |||
Passed | tickets/ticket47721_test.py::test_ticket47721_1 | 3.83 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket47721_test:ticket47721_test.py:127 Running test 1... [32mINFO [0m lib389:ticket47721_test.py:132 Add (M2) b"( ATticket47721-oid NAME 'ATticket47721' DESC 'test AT ticket 47721' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 X-ORIGIN ( 'Test 47721' 'user defined' ) )" [32mINFO [0m lib389:ticket47721_test.py:136 Chg (M2) b"( 2.16.840.1.113730.3.1.569 NAME 'cosPriority' DESC 'Netscape defined attribute type' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 X-ORIGIN 'Netscape Directory Server' )" [32mINFO [0m lib389:ticket47721_test.py:140 Add (M2) b"( OCticket47721-oid NAME 'OCticket47721' DESC 'An group of related automount objects' SUP top STRUCTURAL MUST ou X-ORIGIN 'draft-howard-rfc2307bis' )" [32mINFO [0m lib389:ticket47721_test.py:144 Chg (M2) b"( 5.3.6.1.1.1.2.0 NAME 'trustAccount' DESC 'Sets trust accounts information' SUP top AUXILIARY MUST trustModel MAY ( accessTo $ ou ) X-ORIGIN 'nss_ldap/pam_ldap' )" [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0463f07f-bd9e-487f-b492-38330df3c91f / got description=03268bbf-6083-474b-9f69-591294ec5880) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [35mDEBUG [0m tests.tickets.ticket47721_test:ticket47721_test.py:158 Master 1 schemaCSN: b'5faa17e8000000000000' [35mDEBUG [0m tests.tickets.ticket47721_test:ticket47721_test.py:159 Master 2 schemaCSN: b'5faa17e8000000000000' | |||
Passed | tickets/ticket47721_test.py::test_ticket47721_2 | 3.33 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket47721_test:ticket47721_test.py:163 Running test 2... [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 6f208e35-7f13-4a35-ae06-ebe21aa053d7 / got description=0463f07f-bd9e-487f-b492-38330df3c91f) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [35mDEBUG [0m tests.tickets.ticket47721_test:ticket47721_test.py:176 Master 1 schemaCSN: b'5faa17e8000000000000' [35mDEBUG [0m tests.tickets.ticket47721_test:ticket47721_test.py:177 Master 2 schemaCSN: b'5faa17e8000000000000' | |||
Passed | tickets/ticket47721_test.py::test_ticket47721_3 | 13.85 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket47721_test:ticket47721_test.py:195 Running test 3... [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:ticket47721_test.py:203 Update schema (M2) b"( ATtest3-oid NAME 'ATtest3' DESC 'test AT ticket 47721' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 X-ORIGIN ( 'Test 47721' 'user defined' ) )" [32mINFO [0m lib389:ticket47721_test.py:208 Update schema (M2) b"( OCtest3-oid NAME 'OCtest3' DESC 'An group of related automount objects' SUP top STRUCTURAL MUST ou X-ORIGIN 'draft-howard-rfc2307bis' )" [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 4f7c90a7-0f8a-432e-b15d-f1ab2b849ee3 / got description=6f208e35-7f13-4a35-ae06-ebe21aa053d7) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [35mDEBUG [0m tests.tickets.ticket47721_test:ticket47721_test.py:223 Master 1 schemaCSN: b'5faa17e8000000000000' [35mDEBUG [0m tests.tickets.ticket47721_test:ticket47721_test.py:224 Master 2 schemaCSN: b'5faa17f6000000000000' | |||
Passed | tickets/ticket47721_test.py::test_ticket47721_4 | 5.97 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket47721_test:ticket47721_test.py:245 Running test 4... [32mINFO [0m lib389:ticket47721_test.py:248 Update schema (M1) b"( ATtest4-oid NAME 'ATtest4' DESC 'test AT ticket 47721' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 X-ORIGIN ( 'Test 47721' 'user defined' ) )" [32mINFO [0m lib389:ticket47721_test.py:252 Update schema (M1) b"( OCtest4-oid NAME 'OCtest4' DESC 'An group of related automount objects' SUP top STRUCTURAL MUST ou X-ORIGIN 'draft-howard-rfc2307bis' )" [32mINFO [0m lib389:ticket47721_test.py:255 trigger replication M1->M2: to update the schema [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect da11a198-311a-43d3-8a7f-a0feb7afc32c / got description=4f7c90a7-0f8a-432e-b15d-f1ab2b849ee3) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect da11a198-311a-43d3-8a7f-a0feb7afc32c / got description=4f7c90a7-0f8a-432e-b15d-f1ab2b849ee3) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389:ticket47721_test.py:264 trigger replication M1->M2: to push the schema [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 2c2e3033-274d-4c8d-bbab-18bc20ae82a0 / got description=da11a198-311a-43d3-8a7f-a0feb7afc32c) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from 
ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [35mDEBUG [0m tests.tickets.ticket47721_test:ticket47721_test.py:276 Master 1 schemaCSN: b'5faa17fe000000000000' [35mDEBUG [0m tests.tickets.ticket47721_test:ticket47721_test.py:277 Master 2 schemaCSN: b'5faa17fe000000000000' | |||
Passed | tickets/ticket47787_test.py::test_ticket47787_init | 3.31 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 1356c1e5-78fe-4c37-b5a0-74aa0233e28e / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 
Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 4df52291-cd34-456a-a9c1-1c358468ef8f / got description=1356c1e5-78fe-4c37-b5a0-74aa0233e28e) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47787_test.py:294 ######################### INITIALIZATION ###################### [32mINFO [0m lib389:ticket47787_test.py:297 Add cn=bind_entry,dc=example,dc=com [32mINFO [0m lib389:ticket47787_test.py:305 Add cn=staged user,dc=example,dc=com [32mINFO [0m lib389:ticket47787_test.py:312 Add cn=accounts,dc=example,dc=com | |||
Passed | tickets/ticket47787_test.py::test_ticket47787_2 | 16.95 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47787_test.py:69 ############################################### [32mINFO [0m lib389:ticket47787_test.py:70 ####### [32mINFO [0m lib389:ticket47787_test.py:71 ####### test_ticket47787_2 [32mINFO [0m lib389:ticket47787_test.py:72 ####### [32mINFO [0m lib389:ticket47787_test.py:73 ############################################### [32mINFO [0m lib389:ticket47787_test.py:59 Bind as cn=Directory Manager [32mINFO [0m lib389:ticket47787_test.py:59 Bind as cn=Directory Manager [32mINFO [0m lib389:ticket47787_test.py:159 ######################### Pause RA M1<->M2 ###################### [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:ticket47787_test.py:209 ######################### MOD cn=new_account18,cn=staged user,dc=example,dc=com (M2) ###################### [32mINFO [0m lib389:ticket47787_test.py:200 ######################### DELETE new_account1 (M1) ###################### [32mINFO [0m lib389:ticket47787_test.py:209 ######################### MOD cn=new_account18,cn=staged user,dc=example,dc=com (M2) ###################### [32mINFO [0m lib389:ticket47787_test.py:209 ######################### MOD cn=new_account19,cn=staged user,dc=example,dc=com (M2) ###################### [32mINFO [0m lib389:ticket47787_test.py:209 ######################### MOD cn=new_account1,cn=staged user,dc=example,dc=com (M2) ###################### [32mINFO [0m lib389:ticket47787_test.py:209 ######################### MOD cn=new_account19,cn=staged user,dc=example,dc=com (M2) ###################### [32mINFO [0m lib389:ticket47787_test.py:170 ######################### resume RA M1<->M2 ###################### [32mINFO [0m lib389:agreement.py:1220 Resuming replication 
cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:ticket47787_test.py:388 ######################### Check DEL replicated on M2 ###################### [32mINFO [0m lib389:ticket47787_test.py:79 ######################### Tombstone on M1 ###################### [32mINFO [0m lib389:ticket47787_test.py:92 ######################### Tombstone on M2 ###################### [32mINFO [0m lib389:ticket47787_test.py:96 ######################### Description ###################### DEL M1 - MOD M2 [32mINFO [0m lib389:ticket47787_test.py:97 M1 only [32mINFO [0m lib389:ticket47787_test.py:108 M2 only [32mINFO [0m lib389:ticket47787_test.py:119 M1 differs M2 [32mINFO [0m lib389:ticket47787_test.py:409 ######################### Check MOD replicated on M1 ###################### | |||
Passed | tickets/ticket47808_test.py::test_ticket47808_run | 4.08 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47808_test.py:36 Bind as cn=Directory Manager [32mINFO [0m lib389:ticket47808_test.py:39 ######################### SETUP ATTR UNIQ PLUGIN ###################### [32mINFO [0m lib389:ticket47808_test.py:46 ######################### ADD USER 1 ###################### [32mINFO [0m lib389:ticket47808_test.py:55 Try to add Add dn: cn=test_entry 1, dc=example,dc=com cn: test_entry 1 objectclass: top objectclass: person sn: test_entry : dn: cn=test_entry 1, dc=example,dc=com cn: test_entry 1 objectclass: top objectclass: person sn: test_entry [32mINFO [0m lib389:ticket47808_test.py:58 ######################### Restart Server ###################### [32mINFO [0m lib389:ticket47808_test.py:62 ######################### ADD USER 2 ###################### [32mINFO [0m lib389:ticket47808_test.py:71 Try to add Add dn: cn=test_entry 2, dc=example,dc=com cn: test_entry 2 objectclass: top objectclass: person sn: test_entry : dn: cn=test_entry 2, dc=example,dc=com cn: test_entry 2 objectclass: top objectclass: person sn: test_entry [33mWARNING [0m lib389:ticket47808_test.py:75 Adding cn=test_entry 2, dc=example,dc=com failed [32mINFO [0m lib389:ticket47808_test.py:78 ######################### IS SERVER UP? ###################### [32mINFO [0m lib389:ticket47808_test.py:81 Yes, it's up. 
[32mINFO [0m lib389:ticket47808_test.py:83 ######################### CHECK USER 2 NOT ADDED ###################### [32mINFO [0m lib389:ticket47808_test.py:84 Try to search cn=test_entry 2, dc=example,dc=com [32mINFO [0m lib389:ticket47808_test.py:88 Found none [32mINFO [0m lib389:ticket47808_test.py:90 ######################### DELETE USER 1 ###################### [32mINFO [0m lib389:ticket47808_test.py:92 Try to delete cn=test_entry 1, dc=example,dc=com | |||
Passed | tickets/ticket47823_test.py::test_ticket47823_init | 13.03 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | tickets/ticket47823_test.py::test_ticket47823_one_container_add | 19.59 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47823_test.py:58 ############################################### [32mINFO [0m lib389:ticket47823_test.py:59 ####### [32mINFO [0m lib389:ticket47823_test.py:60 ####### With former config (args), check attribute uniqueness with 'cn' (ADD) [32mINFO [0m lib389:ticket47823_test.py:61 ####### [32mINFO [0m lib389:ticket47823_test.py:62 ############################################### [32mINFO [0m lib389:ticket47823_test.py:140 Uniqueness not enforced: create the entries [32mINFO [0m lib389:ticket47823_test.py:155 Uniqueness enforced: checks second entry is rejected [32mINFO [0m lib389:ticket47823_test.py:58 ############################################### [32mINFO [0m lib389:ticket47823_test.py:59 ####### [32mINFO [0m lib389:ticket47823_test.py:60 ####### With new config (args), check attribute uniqueness with 'cn' (ADD) [32mINFO [0m lib389:ticket47823_test.py:61 ####### [32mINFO [0m lib389:ticket47823_test.py:62 ############################################### [32mINFO [0m lib389:ticket47823_test.py:140 Uniqueness not enforced: create the entries [32mINFO [0m lib389:ticket47823_test.py:155 Uniqueness enforced: checks second entry is rejected | |||
Passed | tickets/ticket47823_test.py::test_ticket47823_one_container_mod | 10.18 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47823_test.py:58 ############################################### [32mINFO [0m lib389:ticket47823_test.py:59 ####### [32mINFO [0m lib389:ticket47823_test.py:60 ####### With former config (args), check attribute uniqueness with 'cn' (MOD) [32mINFO [0m lib389:ticket47823_test.py:61 ####### [32mINFO [0m lib389:ticket47823_test.py:62 ############################################### [32mINFO [0m lib389:ticket47823_test.py:193 Uniqueness enforced: checks MOD ADD entry is rejected [32mINFO [0m lib389:ticket47823_test.py:210 Uniqueness enforced: checks MOD REPLACE entry is rejected [32mINFO [0m lib389:ticket47823_test.py:58 ############################################### [32mINFO [0m lib389:ticket47823_test.py:59 ####### [32mINFO [0m lib389:ticket47823_test.py:60 ####### With new config (args), check attribute uniqueness with 'cn' (MOD) [32mINFO [0m lib389:ticket47823_test.py:61 ####### [32mINFO [0m lib389:ticket47823_test.py:62 ############################################### [32mINFO [0m lib389:ticket47823_test.py:193 Uniqueness enforced: checks MOD ADD entry is rejected [32mINFO [0m lib389:ticket47823_test.py:210 Uniqueness enforced: checks MOD REPLACE entry is rejected | |||
Passed | tickets/ticket47823_test.py::test_ticket47823_one_container_modrdn | 10.60 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47823_test.py:58 ############################################### [32mINFO [0m lib389:ticket47823_test.py:59 ####### [32mINFO [0m lib389:ticket47823_test.py:60 ####### With former config (args), check attribute uniqueness with 'cn' (MODRDN) [32mINFO [0m lib389:ticket47823_test.py:61 ####### [32mINFO [0m lib389:ticket47823_test.py:62 ############################################### [32mINFO [0m lib389:ticket47823_test.py:237 Uniqueness enforced: checks MODRDN entry is rejected [32mINFO [0m lib389:ticket47823_test.py:58 ############################################### [32mINFO [0m lib389:ticket47823_test.py:59 ####### [32mINFO [0m lib389:ticket47823_test.py:60 ####### With former config (args), check attribute uniqueness with 'cn' (MODRDN) [32mINFO [0m lib389:ticket47823_test.py:61 ####### [32mINFO [0m lib389:ticket47823_test.py:62 ############################################### [32mINFO [0m lib389:ticket47823_test.py:237 Uniqueness enforced: checks MODRDN entry is rejected | |||
Passed | tickets/ticket47823_test.py::test_ticket47823_multi_containers_add | 10.24 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47823_test.py:58 ############################################### [32mINFO [0m lib389:ticket47823_test.py:59 ####### [32mINFO [0m lib389:ticket47823_test.py:60 ####### With former config (args), check attribute uniqueness with 'cn' (ADD) [32mINFO [0m lib389:ticket47823_test.py:61 ####### [32mINFO [0m lib389:ticket47823_test.py:62 ############################################### [32mINFO [0m lib389:ticket47823_test.py:58 ############################################### [32mINFO [0m lib389:ticket47823_test.py:59 ####### [32mINFO [0m lib389:ticket47823_test.py:60 ####### With new config (args), check attribute uniqueness with 'cn' (ADD) [32mINFO [0m lib389:ticket47823_test.py:61 ####### [32mINFO [0m lib389:ticket47823_test.py:62 ############################################### | |||
Passed | tickets/ticket47823_test.py::test_ticket47823_multi_containers_mod | 10.16 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47823_test.py:58 ############################################### [32mINFO [0m lib389:ticket47823_test.py:59 ####### [32mINFO [0m lib389:ticket47823_test.py:60 ####### With former config (args), check attribute uniqueness with 'cn' (MOD) on separated container [32mINFO [0m lib389:ticket47823_test.py:61 ####### [32mINFO [0m lib389:ticket47823_test.py:62 ############################################### [32mINFO [0m lib389:ticket47823_test.py:525 Uniqueness not enforced: if same 'cn' modified (add/replace) on separated containers [32mINFO [0m lib389:ticket47823_test.py:58 ############################################### [32mINFO [0m lib389:ticket47823_test.py:59 ####### [32mINFO [0m lib389:ticket47823_test.py:60 ####### With new config (args), check attribute uniqueness with 'cn' (MOD) on separated container [32mINFO [0m lib389:ticket47823_test.py:61 ####### [32mINFO [0m lib389:ticket47823_test.py:62 ############################################### [32mINFO [0m lib389:ticket47823_test.py:531 Uniqueness not enforced: if same 'cn' modified (add/replace) on separated containers | |||
Passed | tickets/ticket47823_test.py::test_ticket47823_multi_containers_modrdn | 10.36 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47823_test.py:58 ############################################### [32mINFO [0m lib389:ticket47823_test.py:59 ####### [32mINFO [0m lib389:ticket47823_test.py:60 ####### With former config (args), check attribute uniqueness with 'cn' (MODRDN) on separated containers [32mINFO [0m lib389:ticket47823_test.py:61 ####### [32mINFO [0m lib389:ticket47823_test.py:62 ############################################### [32mINFO [0m lib389:ticket47823_test.py:545 Uniqueness not enforced: checks MODRDN entry is accepted on separated containers [32mINFO [0m lib389:ticket47823_test.py:548 Uniqueness not enforced: checks MODRDN entry is accepted on separated containers | |||
Passed | tickets/ticket47823_test.py::test_ticket47823_across_multi_containers_add | 4.92 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47823_test.py:58 ############################################### [32mINFO [0m lib389:ticket47823_test.py:59 ####### [32mINFO [0m lib389:ticket47823_test.py:60 ####### With new config (args), check attribute uniqueness with 'cn' (ADD) across several containers [32mINFO [0m lib389:ticket47823_test.py:61 ####### [32mINFO [0m lib389:ticket47823_test.py:62 ############################################### | |||
Passed | tickets/ticket47823_test.py::test_ticket47823_across_multi_containers_mod | 4.92 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47823_test.py:58 ############################################### [32mINFO [0m lib389:ticket47823_test.py:59 ####### [32mINFO [0m lib389:ticket47823_test.py:60 ####### With new config (args), check attribute uniqueness with 'cn' (MOD) across several containers [32mINFO [0m lib389:ticket47823_test.py:61 ####### [32mINFO [0m lib389:ticket47823_test.py:62 ############################################### | |||
Passed | tickets/ticket47823_test.py::test_ticket47823_across_multi_containers_modrdn | 5.28 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47823_test.py:58 ############################################### [32mINFO [0m lib389:ticket47823_test.py:59 ####### [32mINFO [0m lib389:ticket47823_test.py:60 ####### With new config (args), check attribute uniqueness with 'cn' (MODRDN) across several containers [32mINFO [0m lib389:ticket47823_test.py:61 ####### [32mINFO [0m lib389:ticket47823_test.py:62 ############################################### | |||
Passed | tickets/ticket47823_test.py::test_ticket47823_invalid_config_1 | 11.25 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47823_test.py:58 ############################################### [32mINFO [0m lib389:ticket47823_test.py:59 ####### [32mINFO [0m lib389:ticket47823_test.py:60 ####### Invalid config (old): arg0 is missing [32mINFO [0m lib389:ticket47823_test.py:61 ####### [32mINFO [0m lib389:ticket47823_test.py:62 ############################################### | |||
Passed | tickets/ticket47823_test.py::test_ticket47823_invalid_config_2 | 9.90 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47823_test.py:58 ############################################### [32mINFO [0m lib389:ticket47823_test.py:59 ####### [32mINFO [0m lib389:ticket47823_test.py:60 ####### Invalid config (old): arg1 is missing [32mINFO [0m lib389:ticket47823_test.py:61 ####### [32mINFO [0m lib389:ticket47823_test.py:62 ############################################### | |||
Passed | tickets/ticket47823_test.py::test_ticket47823_invalid_config_3 | 11.32 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47823_test.py:58 ############################################### [32mINFO [0m lib389:ticket47823_test.py:59 ####### [32mINFO [0m lib389:ticket47823_test.py:60 ####### Invalid config (old): arg0 is missing but new config attrname exists [32mINFO [0m lib389:ticket47823_test.py:61 ####### [32mINFO [0m lib389:ticket47823_test.py:62 ############################################### | |||
Passed | tickets/ticket47823_test.py::test_ticket47823_invalid_config_4 | 10.13 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47823_test.py:58 ############################################### [32mINFO [0m lib389:ticket47823_test.py:59 ####### [32mINFO [0m lib389:ticket47823_test.py:60 ####### Invalid config (old): arg1 is missing but new config exist [32mINFO [0m lib389:ticket47823_test.py:61 ####### [32mINFO [0m lib389:ticket47823_test.py:62 ############################################### | |||
Passed | tickets/ticket47823_test.py::test_ticket47823_invalid_config_5 | 9.85 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47823_test.py:58 ############################################### [32mINFO [0m lib389:ticket47823_test.py:59 ####### [32mINFO [0m lib389:ticket47823_test.py:60 ####### Invalid config (new): uniqueness-attribute-name is missing [32mINFO [0m lib389:ticket47823_test.py:61 ####### [32mINFO [0m lib389:ticket47823_test.py:62 ############################################### | |||
Passed | tickets/ticket47823_test.py::test_ticket47823_invalid_config_6 | 10.19 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47823_test.py:58 ############################################### [32mINFO [0m lib389:ticket47823_test.py:59 ####### [32mINFO [0m lib389:ticket47823_test.py:60 ####### Invalid config (new): uniqueness-subtrees is missing [32mINFO [0m lib389:ticket47823_test.py:61 ####### [32mINFO [0m lib389:ticket47823_test.py:62 ############################################### | |||
Passed | tickets/ticket47823_test.py::test_ticket47823_invalid_config_7 | 11.50 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47823_test.py:58 ############################################### [32mINFO [0m lib389:ticket47823_test.py:59 ####### [32mINFO [0m lib389:ticket47823_test.py:60 ####### Invalid config (new): uniqueness-subtrees are invalid [32mINFO [0m lib389:ticket47823_test.py:61 ####### [32mINFO [0m lib389:ticket47823_test.py:62 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_init | 4.94 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_0 | 0.10 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47828_test.py:42 ############################################### [32mINFO [0m lib389:ticket47828_test.py:43 ####### [32mINFO [0m lib389:ticket47828_test.py:44 ####### NO exclude scope: Add an active entry and check its ALLOCATED_ATTR is set [32mINFO [0m lib389:ticket47828_test.py:45 ####### [32mINFO [0m lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_1 | 0.03 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47828_test.py:42 ############################################### [32mINFO [0m lib389:ticket47828_test.py:43 ####### [32mINFO [0m lib389:ticket47828_test.py:44 ####### NO exclude scope: Add an active entry and check its ALLOCATED_ATTR is unchanged (!= magic) [32mINFO [0m lib389:ticket47828_test.py:45 ####### [32mINFO [0m lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_2 | 0.27 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47828_test.py:42 ############################################### [32mINFO [0m lib389:ticket47828_test.py:43 ####### [32mINFO [0m lib389:ticket47828_test.py:44 ####### NO exclude scope: Add a staged entry and check its ALLOCATED_ATTR is set [32mINFO [0m lib389:ticket47828_test.py:45 ####### [32mINFO [0m lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_3 | 0.02 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47828_test.py:42 ############################################### [32mINFO [0m lib389:ticket47828_test.py:43 ####### [32mINFO [0m lib389:ticket47828_test.py:44 ####### NO exclude scope: Add a staged entry and check its ALLOCATED_ATTR is unchanged (!= magic) [32mINFO [0m lib389:ticket47828_test.py:45 ####### [32mINFO [0m lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_4 | 0.01 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47828_test.py:42 ############################################### [32mINFO [0m lib389:ticket47828_test.py:43 ####### [32mINFO [0m lib389:ticket47828_test.py:44 ####### Exclude the provisioning container [32mINFO [0m lib389:ticket47828_test.py:45 ####### [32mINFO [0m lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_5 | 0.03 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47828_test.py:42 ############################################### [32mINFO [0m lib389:ticket47828_test.py:43 ####### [32mINFO [0m lib389:ticket47828_test.py:44 ####### Provisioning excluded scope: Add an active entry and check its ALLOCATED_ATTR is set [32mINFO [0m lib389:ticket47828_test.py:45 ####### [32mINFO [0m lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_6 | 0.02 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47828_test.py:42 ############################################### [32mINFO [0m lib389:ticket47828_test.py:43 ####### [32mINFO [0m lib389:ticket47828_test.py:44 ####### Provisioning excluded scope: Add an active entry and check its ALLOCATED_ATTR is unchanged (!= magic) [32mINFO [0m lib389:ticket47828_test.py:45 ####### [32mINFO [0m lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_7 | 0.02 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47828_test.py:42 ############################################### [32mINFO [0m lib389:ticket47828_test.py:43 ####### [32mINFO [0m lib389:ticket47828_test.py:44 ####### Provisioning excluded scope: Add a staged entry and check its ALLOCATED_ATTR is not set [32mINFO [0m lib389:ticket47828_test.py:45 ####### [32mINFO [0m lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_8 | 0.02 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47828_test.py:42 ############################################### [32mINFO [0m lib389:ticket47828_test.py:43 ####### [32mINFO [0m lib389:ticket47828_test.py:44 ####### Provisioning excluded scope: Add a staged entry and check its ALLOCATED_ATTR is unchanged (!= magic) [32mINFO [0m lib389:ticket47828_test.py:45 ####### [32mINFO [0m lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_9 | 0.05 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47828_test.py:42 ############################################### [32mINFO [0m lib389:ticket47828_test.py:43 ####### [32mINFO [0m lib389:ticket47828_test.py:44 ####### Provisioning excluded scope: Add an dummy entry and check its ALLOCATED_ATTR is set [32mINFO [0m lib389:ticket47828_test.py:45 ####### [32mINFO [0m lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_10 | 0.02 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47828_test.py:42 ############################################### [32mINFO [0m lib389:ticket47828_test.py:43 ####### [32mINFO [0m lib389:ticket47828_test.py:44 ####### Provisioning excluded scope: Add an dummy entry and check its ALLOCATED_ATTR is unchanged (!= magic) [32mINFO [0m lib389:ticket47828_test.py:45 ####### [32mINFO [0m lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_11 | 0.02 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47828_test.py:42 ############################################### [32mINFO [0m lib389:ticket47828_test.py:43 ####### [32mINFO [0m lib389:ticket47828_test.py:44 ####### Exclude (in addition) the dummy container [32mINFO [0m lib389:ticket47828_test.py:45 ####### [32mINFO [0m lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_12 | 0.03 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47828_test.py:42 ############################################### [32mINFO [0m lib389:ticket47828_test.py:43 ####### [32mINFO [0m lib389:ticket47828_test.py:44 ####### Provisioning/Dummy excluded scope: Add an active entry and check its ALLOCATED_ATTR is set [32mINFO [0m lib389:ticket47828_test.py:45 ####### [32mINFO [0m lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_13 | 0.03 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47828_test.py:42 ############################################### [32mINFO [0m lib389:ticket47828_test.py:43 ####### [32mINFO [0m lib389:ticket47828_test.py:44 ####### Provisioning/Dummy excluded scope: Add an active entry and check its ALLOCATED_ATTR is unchanged (!= magic) [32mINFO [0m lib389:ticket47828_test.py:45 ####### [32mINFO [0m lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_14 | 0.03 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47828_test.py:42 ############################################### [32mINFO [0m lib389:ticket47828_test.py:43 ####### [32mINFO [0m lib389:ticket47828_test.py:44 ####### Provisioning/Dummy excluded scope: Add a staged entry and check its ALLOCATED_ATTR is not set [32mINFO [0m lib389:ticket47828_test.py:45 ####### [32mINFO [0m lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_15 | 0.03 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47828_test.py:42 ############################################### [32mINFO [0m lib389:ticket47828_test.py:43 ####### [32mINFO [0m lib389:ticket47828_test.py:44 ####### Provisioning/Dummy excluded scope: Add a staged entry and check its ALLOCATED_ATTR is unchanged (!= magic) [32mINFO [0m lib389:ticket47828_test.py:45 ####### [32mINFO [0m lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_16 | 0.02 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47828_test.py:42 ############################################### [32mINFO [0m lib389:ticket47828_test.py:43 ####### [32mINFO [0m lib389:ticket47828_test.py:44 ####### Provisioning/Dummy excluded scope: Add an dummy entry and check its ALLOCATED_ATTR not is set [32mINFO [0m lib389:ticket47828_test.py:45 ####### [32mINFO [0m lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_17 | 0.02 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47828_test.py:42 ############################################### [32mINFO [0m lib389:ticket47828_test.py:43 ####### [32mINFO [0m lib389:ticket47828_test.py:44 ####### Provisioning/Dummy excluded scope: Add an dummy entry and check its ALLOCATED_ATTR is unchanged (!= magic) [32mINFO [0m lib389:ticket47828_test.py:45 ####### [32mINFO [0m lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_18 | 0.02 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47828_test.py:42 ############################################### [32mINFO [0m lib389:ticket47828_test.py:43 ####### [32mINFO [0m lib389:ticket47828_test.py:44 ####### Exclude PROVISIONING and a wrong container [32mINFO [0m lib389:ticket47828_test.py:45 ####### [32mINFO [0m lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_19 | 0.03 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47828_test.py:42 ############################################### [32mINFO [0m lib389:ticket47828_test.py:43 ####### [32mINFO [0m lib389:ticket47828_test.py:44 ####### Provisioning+wrong container excluded scope: Add an active entry and check its ALLOCATED_ATTR is set [32mINFO [0m lib389:ticket47828_test.py:45 ####### [32mINFO [0m lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_20 | 0.03 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47828_test.py:42 ############################################### [32mINFO [0m lib389:ticket47828_test.py:43 ####### [32mINFO [0m lib389:ticket47828_test.py:44 ####### Provisioning+wrong container excluded scope: Add an active entry and check its ALLOCATED_ATTR is unchanged (!= magic) [32mINFO [0m lib389:ticket47828_test.py:45 ####### [32mINFO [0m lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_21 | 0.02 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47828_test.py:42 ############################################### [32mINFO [0m lib389:ticket47828_test.py:43 ####### [32mINFO [0m lib389:ticket47828_test.py:44 ####### Provisioning+wrong container excluded scope: Add a staged entry and check its ALLOCATED_ATTR is not set [32mINFO [0m lib389:ticket47828_test.py:45 ####### [32mINFO [0m lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_22 | 0.02 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47828_test.py:42 ############################################### [32mINFO [0m lib389:ticket47828_test.py:43 ####### [32mINFO [0m lib389:ticket47828_test.py:44 ####### Provisioning+wrong container excluded scope: Add a staged entry and check its ALLOCATED_ATTR is unchanged (!= magic) [32mINFO [0m lib389:ticket47828_test.py:45 ####### [32mINFO [0m lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_23 | 0.03 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47828_test.py:42 ############################################### [32mINFO [0m lib389:ticket47828_test.py:43 ####### [32mINFO [0m lib389:ticket47828_test.py:44 ####### Provisioning+wrong container excluded scope: Add an dummy entry and check its ALLOCATED_ATTR is set [32mINFO [0m lib389:ticket47828_test.py:45 ####### [32mINFO [0m lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_24 | 0.03 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47828_test.py:42 ############################################### [32mINFO [0m lib389:ticket47828_test.py:43 ####### [32mINFO [0m lib389:ticket47828_test.py:44 ####### Provisioning+wrong container excluded scope: Add an dummy entry and check its ALLOCATED_ATTR is unchanged (!= magic) [32mINFO [0m lib389:ticket47828_test.py:45 ####### [32mINFO [0m lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_25 | 0.02 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47828_test.py:42 ############################################### [32mINFO [0m lib389:ticket47828_test.py:43 ####### [32mINFO [0m lib389:ticket47828_test.py:44 ####### Exclude a wrong container [32mINFO [0m lib389:ticket47828_test.py:45 ####### [32mINFO [0m lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_26 | 0.03 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47828_test.py:42 ############################################### [32mINFO [0m lib389:ticket47828_test.py:43 ####### [32mINFO [0m lib389:ticket47828_test.py:44 ####### Wrong container excluded scope: Add an active entry and check its ALLOCATED_ATTR is set [32mINFO [0m lib389:ticket47828_test.py:45 ####### [32mINFO [0m lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_27 | 0.04 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47828_test.py:42 ############################################### [32mINFO [0m lib389:ticket47828_test.py:43 ####### [32mINFO [0m lib389:ticket47828_test.py:44 ####### Wrong container excluded scope: Add an active entry and check its ALLOCATED_ATTR is unchanged (!= magic) [32mINFO [0m lib389:ticket47828_test.py:45 ####### [32mINFO [0m lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_28 | 0.03 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47828_test.py:42 ############################################### [32mINFO [0m lib389:ticket47828_test.py:43 ####### [32mINFO [0m lib389:ticket47828_test.py:44 ####### Wrong container excluded scope: Add a staged entry and check its ALLOCATED_ATTR is not set [32mINFO [0m lib389:ticket47828_test.py:45 ####### [32mINFO [0m lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_29 | 0.04 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47828_test.py:42 ############################################### [32mINFO [0m lib389:ticket47828_test.py:43 ####### [32mINFO [0m lib389:ticket47828_test.py:44 ####### Wrong container excluded scope: Add a staged entry and check its ALLOCATED_ATTR is unchanged (!= magic) [32mINFO [0m lib389:ticket47828_test.py:45 ####### [32mINFO [0m lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_30 | 0.03 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47828_test.py:42 ############################################### [32mINFO [0m lib389:ticket47828_test.py:43 ####### [32mINFO [0m lib389:ticket47828_test.py:44 ####### Wrong container excluded scope: Add an dummy entry and check its ALLOCATED_ATTR is set [32mINFO [0m lib389:ticket47828_test.py:45 ####### [32mINFO [0m lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_31 | 0.04 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47828_test.py:42 ############################################### [32mINFO [0m lib389:ticket47828_test.py:43 ####### [32mINFO [0m lib389:ticket47828_test.py:44 ####### Wrong container excluded scope: Add an dummy entry and check its ALLOCATED_ATTR is unchanged (!= magic) [32mINFO [0m lib389:ticket47828_test.py:45 ####### [32mINFO [0m lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47829_test.py::test_ticket47829_init | 4.92 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | tickets/ticket47829_test.py::test_ticket47829_mod_active_user_1 | 2.17 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47829_test.py:62 ############################################### [32mINFO [0m lib389:ticket47829_test.py:63 ####### [32mINFO [0m lib389:ticket47829_test.py:64 ####### MOD: add an active user to an active group [32mINFO [0m lib389:ticket47829_test.py:65 ####### [32mINFO [0m lib389:ticket47829_test.py:66 ############################################### [32mINFO [0m lib389:ticket47829_test.py:172 add entry cn=active guy,cn=accounts,cn=in,dc=example,dc=com [32mINFO [0m lib389:ticket47829_test.py:173 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com [32mINFO [0m lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' [32mINFO [0m lib389:ticket47829_test.py:116 !!!!!!! cn=active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=active guy,cn=accounts,cn=in,dc=example,dc=com' [32mINFO [0m lib389:ticket47829_test.py:172 delete entry cn=active guy,cn=accounts,cn=in,dc=example,dc=com [32mINFO [0m lib389:ticket47829_test.py:173 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com | |||
Passed | tickets/ticket47829_test.py::test_ticket47829_mod_active_user_2 | 2.35 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47829_test.py:62 ############################################### [32mINFO [0m lib389:ticket47829_test.py:63 ####### [32mINFO [0m lib389:ticket47829_test.py:64 ####### MOD: add an Active user to a Stage group [32mINFO [0m lib389:ticket47829_test.py:65 ####### [32mINFO [0m lib389:ticket47829_test.py:66 ############################################### [32mINFO [0m lib389:ticket47829_test.py:172 add entry cn=active guy,cn=accounts,cn=in,dc=example,dc=com [32mINFO [0m lib389:ticket47829_test.py:173 to group cn=stage group,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com [32mINFO [0m lib389:ticket47829_test.py:116 !!!!!!! cn=stage group,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com: member ->b'cn=active guy,cn=accounts,cn=in,dc=example,dc=com' [32mINFO [0m lib389:ticket47829_test.py:172 delete entry cn=active guy,cn=accounts,cn=in,dc=example,dc=com [32mINFO [0m lib389:ticket47829_test.py:173 to group cn=stage group,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com | |||
Passed | tickets/ticket47829_test.py::test_ticket47829_mod_active_user_3 | 2.02 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47829_test.py:62 ############################################### [32mINFO [0m lib389:ticket47829_test.py:63 ####### [32mINFO [0m lib389:ticket47829_test.py:64 ####### MOD: add an Active user to a out of scope group [32mINFO [0m lib389:ticket47829_test.py:65 ####### [32mINFO [0m lib389:ticket47829_test.py:66 ############################################### [32mINFO [0m lib389:ticket47829_test.py:172 add entry cn=active guy,cn=accounts,cn=in,dc=example,dc=com [32mINFO [0m lib389:ticket47829_test.py:173 to group cn=out group,cn=out,dc=example,dc=com [32mINFO [0m lib389:ticket47829_test.py:116 !!!!!!! cn=out group,cn=out,dc=example,dc=com: member ->b'cn=active guy,cn=accounts,cn=in,dc=example,dc=com' [32mINFO [0m lib389:ticket47829_test.py:172 delete entry cn=active guy,cn=accounts,cn=in,dc=example,dc=com [32mINFO [0m lib389:ticket47829_test.py:173 to group cn=out group,cn=out,dc=example,dc=com | |||
Passed | tickets/ticket47829_test.py::test_ticket47829_mod_stage_user_1 | 2.02 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47829_test.py:62 ############################################### [32mINFO [0m lib389:ticket47829_test.py:63 ####### [32mINFO [0m lib389:ticket47829_test.py:64 ####### MOD: add an Stage user to a Active group [32mINFO [0m lib389:ticket47829_test.py:65 ####### [32mINFO [0m lib389:ticket47829_test.py:66 ############################################### [32mINFO [0m lib389:ticket47829_test.py:172 add entry cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com [32mINFO [0m lib389:ticket47829_test.py:173 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com [32mINFO [0m lib389:ticket47829_test.py:116 !!!!!!! cn=active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com' [32mINFO [0m lib389:ticket47829_test.py:172 delete entry cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com [32mINFO [0m lib389:ticket47829_test.py:173 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com | |||
Passed | tickets/ticket47829_test.py::test_ticket47829_mod_stage_user_2 | 2.02 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47829_test.py:62 ############################################### [32mINFO [0m lib389:ticket47829_test.py:63 ####### [32mINFO [0m lib389:ticket47829_test.py:64 ####### MOD: add an Stage user to a Stage group [32mINFO [0m lib389:ticket47829_test.py:65 ####### [32mINFO [0m lib389:ticket47829_test.py:66 ############################################### [32mINFO [0m lib389:ticket47829_test.py:172 add entry cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com [32mINFO [0m lib389:ticket47829_test.py:173 to group cn=stage group,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com [32mINFO [0m lib389:ticket47829_test.py:116 !!!!!!! cn=stage group,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com: member ->b'cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com' [32mINFO [0m lib389:ticket47829_test.py:172 delete entry cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com [32mINFO [0m lib389:ticket47829_test.py:173 to group cn=stage group,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com | |||
Passed | tickets/ticket47829_test.py::test_ticket47829_mod_stage_user_3 | 2.02 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47829_test.py:62 ############################################### [32mINFO [0m lib389:ticket47829_test.py:63 ####### [32mINFO [0m lib389:ticket47829_test.py:64 ####### MOD: add an Stage user to a out of scope group [32mINFO [0m lib389:ticket47829_test.py:65 ####### [32mINFO [0m lib389:ticket47829_test.py:66 ############################################### [32mINFO [0m lib389:ticket47829_test.py:172 add entry cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com [32mINFO [0m lib389:ticket47829_test.py:173 to group cn=out group,cn=out,dc=example,dc=com [32mINFO [0m lib389:ticket47829_test.py:116 !!!!!!! cn=out group,cn=out,dc=example,dc=com: member ->b'cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com' [32mINFO [0m lib389:ticket47829_test.py:172 delete entry cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com [32mINFO [0m lib389:ticket47829_test.py:173 to group cn=out group,cn=out,dc=example,dc=com | |||
Passed | tickets/ticket47829_test.py::test_ticket47829_mod_out_user_1 | 2.02 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47829_test.py:62 ############################################### [32mINFO [0m lib389:ticket47829_test.py:63 ####### [32mINFO [0m lib389:ticket47829_test.py:64 ####### MOD: add an out of scope user to an active group [32mINFO [0m lib389:ticket47829_test.py:65 ####### [32mINFO [0m lib389:ticket47829_test.py:66 ############################################### [32mINFO [0m lib389:ticket47829_test.py:172 add entry cn=out guy,cn=out,dc=example,dc=com [32mINFO [0m lib389:ticket47829_test.py:173 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com [32mINFO [0m lib389:ticket47829_test.py:116 !!!!!!! cn=active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=out guy,cn=out,dc=example,dc=com' [32mINFO [0m lib389:ticket47829_test.py:172 delete entry cn=out guy,cn=out,dc=example,dc=com [32mINFO [0m lib389:ticket47829_test.py:173 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com | |||
Passed | tickets/ticket47829_test.py::test_ticket47829_mod_out_user_2 | 2.02 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47829_test.py:62 ############################################### [32mINFO [0m lib389:ticket47829_test.py:63 ####### [32mINFO [0m lib389:ticket47829_test.py:64 ####### MOD: add an out of scope user to a Stage group [32mINFO [0m lib389:ticket47829_test.py:65 ####### [32mINFO [0m lib389:ticket47829_test.py:66 ############################################### [32mINFO [0m lib389:ticket47829_test.py:172 add entry cn=out guy,cn=out,dc=example,dc=com [32mINFO [0m lib389:ticket47829_test.py:173 to group cn=stage group,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com [32mINFO [0m lib389:ticket47829_test.py:116 !!!!!!! cn=stage group,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com: member ->b'cn=out guy,cn=out,dc=example,dc=com' [32mINFO [0m lib389:ticket47829_test.py:172 delete entry cn=out guy,cn=out,dc=example,dc=com [32mINFO [0m lib389:ticket47829_test.py:173 to group cn=stage group,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com | |||
Passed | tickets/ticket47829_test.py::test_ticket47829_mod_out_user_3 | 2.05 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47829_test.py:62 ############################################### [32mINFO [0m lib389:ticket47829_test.py:63 ####### [32mINFO [0m lib389:ticket47829_test.py:64 ####### MOD: add an out of scope user to an out of scope group [32mINFO [0m lib389:ticket47829_test.py:65 ####### [32mINFO [0m lib389:ticket47829_test.py:66 ############################################### [32mINFO [0m lib389:ticket47829_test.py:172 add entry cn=out guy,cn=out,dc=example,dc=com [32mINFO [0m lib389:ticket47829_test.py:173 to group cn=out group,cn=out,dc=example,dc=com [32mINFO [0m lib389:ticket47829_test.py:116 !!!!!!! cn=out group,cn=out,dc=example,dc=com: member ->b'cn=out guy,cn=out,dc=example,dc=com' [32mINFO [0m lib389:ticket47829_test.py:172 delete entry cn=out guy,cn=out,dc=example,dc=com [32mINFO [0m lib389:ticket47829_test.py:173 to group cn=out group,cn=out,dc=example,dc=com | |||
Passed | tickets/ticket47829_test.py::test_ticket47829_mod_active_user_modrdn_active_user_1 | 2.07 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47829_test.py:62 ############################################### [32mINFO [0m lib389:ticket47829_test.py:63 ####### [32mINFO [0m lib389:ticket47829_test.py:64 ####### add an Active user to a Active group. Then move Active user to Active [32mINFO [0m lib389:ticket47829_test.py:65 ####### [32mINFO [0m lib389:ticket47829_test.py:66 ############################################### [32mINFO [0m lib389:ticket47829_test.py:172 add entry cn=active guy,cn=accounts,cn=in,dc=example,dc=com [32mINFO [0m lib389:ticket47829_test.py:173 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com [32mINFO [0m lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' [32mINFO [0m lib389:ticket47829_test.py:116 !!!!!!! cn=active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=active guy,cn=accounts,cn=in,dc=example,dc=com' [32mINFO [0m lib389:ticket47829_test.py:132 ######################### MODRDN cn=xactive guy ###################### [32mINFO [0m lib389:ticket47829_test.py:96 !!!!!!! cn=xactive guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' [32mINFO [0m lib389:ticket47829_test.py:116 !!!!!!! cn=active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=xactive guy,cn=accounts,cn=in,dc=example,dc=com' [32mINFO [0m lib389:ticket47829_test.py:132 ######################### MODRDN cn=active guy ###################### [32mINFO [0m lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' [32mINFO [0m lib389:ticket47829_test.py:116 !!!!!!! 
cn=active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=active guy,cn=accounts,cn=in,dc=example,dc=com' [32mINFO [0m lib389:ticket47829_test.py:172 delete entry cn=active guy,cn=accounts,cn=in,dc=example,dc=com [32mINFO [0m lib389:ticket47829_test.py:173 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com | |||
Passed | tickets/ticket47829_test.py::test_ticket47829_mod_active_user_modrdn_stage_user_1 | 1.09 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47829_test.py:62 ############################################### [32mINFO [0m lib389:ticket47829_test.py:63 ####### [32mINFO [0m lib389:ticket47829_test.py:64 ####### add an Active user to a Active group. Then move Active user to Stage [32mINFO [0m lib389:ticket47829_test.py:65 ####### [32mINFO [0m lib389:ticket47829_test.py:66 ############################################### [32mINFO [0m lib389:ticket47829_test.py:172 add entry cn=active guy,cn=accounts,cn=in,dc=example,dc=com [32mINFO [0m lib389:ticket47829_test.py:173 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com [32mINFO [0m lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' [32mINFO [0m lib389:ticket47829_test.py:116 !!!!!!! cn=active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=active guy,cn=accounts,cn=in,dc=example,dc=com' [32mINFO [0m lib389:ticket47829_test.py:132 ######################### MODRDN cn=active guy ###################### [32mINFO [0m lib389:ticket47829_test.py:132 ######################### MODRDN cn=active guy ###################### | |||
Passed | tickets/ticket47829_test.py::test_ticket47829_mod_active_user_modrdn_out_user_1 | 1.04 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47829_test.py:62 ############################################### [32mINFO [0m lib389:ticket47829_test.py:63 ####### [32mINFO [0m lib389:ticket47829_test.py:64 ####### add an Active user to a Active group. Then move Active user to out of scope [32mINFO [0m lib389:ticket47829_test.py:65 ####### [32mINFO [0m lib389:ticket47829_test.py:66 ############################################### [32mINFO [0m lib389:ticket47829_test.py:172 add entry cn=active guy,cn=accounts,cn=in,dc=example,dc=com [32mINFO [0m lib389:ticket47829_test.py:173 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com [32mINFO [0m lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' [32mINFO [0m lib389:ticket47829_test.py:116 !!!!!!! cn=active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=active guy,cn=accounts,cn=in,dc=example,dc=com' [32mINFO [0m lib389:ticket47829_test.py:132 ######################### MODRDN cn=active guy ###################### [32mINFO [0m lib389:ticket47829_test.py:132 ######################### MODRDN cn=active guy ###################### | |||
Passed | tickets/ticket47829_test.py::test_ticket47829_mod_modrdn_1 | 1.07 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47829_test.py:62 ############################################### [32mINFO [0m lib389:ticket47829_test.py:63 ####### [32mINFO [0m lib389:ticket47829_test.py:64 ####### add an Stage user to a Active group. Then move Stage user to Active [32mINFO [0m lib389:ticket47829_test.py:65 ####### [32mINFO [0m lib389:ticket47829_test.py:66 ############################################### [32mINFO [0m lib389:ticket47829_test.py:172 add entry cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com [32mINFO [0m lib389:ticket47829_test.py:173 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com [32mINFO [0m lib389:ticket47829_test.py:116 !!!!!!! cn=active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com' [32mINFO [0m lib389:ticket47829_test.py:132 ######################### MODRDN cn=stage guy ###################### [32mINFO [0m lib389:ticket47829_test.py:96 !!!!!!! cn=stage guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' [32mINFO [0m lib389:ticket47829_test.py:116 !!!!!!! cn=active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=stage guy,cn=accounts,cn=in,dc=example,dc=com' [32mINFO [0m lib389:ticket47829_test.py:132 ######################### MODRDN cn=stage guy ###################### | |||
Passed | tickets/ticket47829_test.py::test_ticket47829_mod_stage_user_modrdn_active_user_1 | 1.05 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47829_test.py:62 ############################################### [32mINFO [0m lib389:ticket47829_test.py:63 ####### [32mINFO [0m lib389:ticket47829_test.py:64 ####### add an Stage user to a Active group. Then move Stage user to Active [32mINFO [0m lib389:ticket47829_test.py:65 ####### [32mINFO [0m lib389:ticket47829_test.py:66 ############################################### [32mINFO [0m lib389:ticket47829_test.py:172 add entry cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com [32mINFO [0m lib389:ticket47829_test.py:173 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com [32mINFO [0m lib389:ticket47829_test.py:116 !!!!!!! cn=active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com' [32mINFO [0m lib389:ticket47829_test.py:132 ######################### MODRDN cn=stage guy ###################### [32mINFO [0m lib389:ticket47829_test.py:96 !!!!!!! cn=stage guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' [32mINFO [0m lib389:ticket47829_test.py:116 !!!!!!! cn=active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=stage guy,cn=accounts,cn=in,dc=example,dc=com' [32mINFO [0m lib389:ticket47829_test.py:132 ######################### MODRDN cn=stage guy ###################### | |||
Passed | tickets/ticket47829_test.py::test_ticket47829_mod_stage_user_modrdn_stage_user_1 | 0.00 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47829_test.py:62 ############################################### [32mINFO [0m lib389:ticket47829_test.py:63 ####### [32mINFO [0m lib389:ticket47829_test.py:64 ####### add an Stage user to a Active group. Then move Stage user to Stage [32mINFO [0m lib389:ticket47829_test.py:65 ####### [32mINFO [0m lib389:ticket47829_test.py:66 ############################################### [32mINFO [0m lib389:ticket47829_test.py:62 ############################################### [32mINFO [0m lib389:ticket47829_test.py:63 ####### [32mINFO [0m lib389:ticket47829_test.py:64 ####### Return because it requires a fix for 47833 [32mINFO [0m lib389:ticket47829_test.py:65 ####### [32mINFO [0m lib389:ticket47829_test.py:66 ############################################### | |||
Passed | tickets/ticket47829_test.py::test_ticket47829_indirect_active_group_1 | 2.08 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47829_test.py:62 ############################################### [32mINFO [0m lib389:ticket47829_test.py:63 ####### [32mINFO [0m lib389:ticket47829_test.py:64 ####### add an Active group (G1) to an active group (G0). Then add active user to G1 [32mINFO [0m lib389:ticket47829_test.py:65 ####### [32mINFO [0m lib389:ticket47829_test.py:66 ############################################### [32mINFO [0m lib389:ticket47829_test.py:172 add entry cn=active guy,cn=accounts,cn=in,dc=example,dc=com [32mINFO [0m lib389:ticket47829_test.py:173 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com [32mINFO [0m lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' [32mINFO [0m lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' [32mINFO [0m lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=indirect active group,cn=accounts,cn=in,dc=example,dc=com' [32mINFO [0m lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' [32mINFO [0m lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' [32mINFO [0m lib389:ticket47829_test.py:172 delete entry cn=active guy,cn=accounts,cn=in,dc=example,dc=com [32mINFO [0m lib389:ticket47829_test.py:173 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com | |||
Passed | tickets/ticket47829_test.py::test_ticket47829_indirect_active_group_2 | 1.10 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47829_test.py:62 ############################################### [32mINFO [0m lib389:ticket47829_test.py:63 ####### [32mINFO [0m lib389:ticket47829_test.py:64 ####### add an Active group (G1) to an active group (G0). Then add active user to G1. Then move active user to stage [32mINFO [0m lib389:ticket47829_test.py:65 ####### [32mINFO [0m lib389:ticket47829_test.py:66 ############################################### [32mINFO [0m lib389:ticket47829_test.py:172 add entry cn=active guy,cn=accounts,cn=in,dc=example,dc=com [32mINFO [0m lib389:ticket47829_test.py:173 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com [32mINFO [0m lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' [32mINFO [0m lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' [32mINFO [0m lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=indirect active group,cn=accounts,cn=in,dc=example,dc=com' [32mINFO [0m lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' [32mINFO [0m lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' [32mINFO [0m lib389:ticket47829_test.py:132 ######################### MODRDN cn=active guy ###################### [32mINFO [0m lib389:ticket47829_test.py:132 ######################### MODRDN cn=active guy ###################### | |||
Passed | tickets/ticket47829_test.py::test_ticket47829_indirect_active_group_3 | 1.07 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47829_test.py:62 ############################################### [32mINFO [0m lib389:ticket47829_test.py:63 ####### [32mINFO [0m lib389:ticket47829_test.py:64 ####### add an Active group (G1) to an active group (G0). Then add active user to G1. Then move active user to out of the scope [32mINFO [0m lib389:ticket47829_test.py:65 ####### [32mINFO [0m lib389:ticket47829_test.py:66 ############################################### [32mINFO [0m lib389:ticket47829_test.py:172 add entry cn=active guy,cn=accounts,cn=in,dc=example,dc=com [32mINFO [0m lib389:ticket47829_test.py:173 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com [32mINFO [0m lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' [32mINFO [0m lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' [32mINFO [0m lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=indirect active group,cn=accounts,cn=in,dc=example,dc=com' [32mINFO [0m lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' [32mINFO [0m lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' [32mINFO [0m lib389:ticket47829_test.py:132 ######################### MODRDN cn=active guy ###################### [32mINFO [0m lib389:ticket47829_test.py:132 ######################### MODRDN cn=active guy ###################### | |||
Passed | tickets/ticket47829_test.py::test_ticket47829_indirect_active_group_4 | 1.09 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47829_test.py:62 ############################################### [32mINFO [0m lib389:ticket47829_test.py:63 ####### [32mINFO [0m lib389:ticket47829_test.py:64 ####### add an Active group (G1) to an active group (G0). Then add stage user to G1. Then move user to active. Then move it back [32mINFO [0m lib389:ticket47829_test.py:65 ####### [32mINFO [0m lib389:ticket47829_test.py:66 ############################################### [32mINFO [0m lib389:ticket47829_test.py:172 add entry cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com [32mINFO [0m lib389:ticket47829_test.py:173 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com [32mINFO [0m lib389:ticket47829_test.py:116 !!!!!!! cn=active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com' [32mINFO [0m lib389:ticket47829_test.py:116 !!!!!!! cn=indirect active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' [32mINFO [0m lib389:ticket47829_test.py:132 ######################### MODRDN cn=stage guy ###################### [32mINFO [0m lib389:ticket47829_test.py:116 !!!!!!! cn=active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=stage guy,cn=accounts,cn=in,dc=example,dc=com' [32mINFO [0m lib389:ticket47829_test.py:116 !!!!!!! cn=indirect active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' [32mINFO [0m lib389:ticket47829_test.py:96 !!!!!!! cn=stage guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' [32mINFO [0m lib389:ticket47829_test.py:96 !!!!!!! cn=stage guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=indirect active group,cn=accounts,cn=in,dc=example,dc=com' [32mINFO [0m lib389:ticket47829_test.py:96 !!!!!!! 
cn=stage guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' [32mINFO [0m lib389:ticket47829_test.py:132 ######################### MODRDN cn=stage guy ###################### [32mINFO [0m lib389:ticket47829_test.py:116 !!!!!!! cn=indirect active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' | |||
Passed | tickets/ticket47833_test.py::test_ticket47829_init | 5.05 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | tickets/ticket47833_test.py::test_ticket47829_mod_stage_user_modrdn_stage_user_1 | 1.05 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47833_test.py:58 ############################################### [32mINFO [0m lib389:ticket47833_test.py:59 ####### [32mINFO [0m lib389:ticket47833_test.py:60 ####### add an Stage user to a Active group. Then move Stage user to Stage [32mINFO [0m lib389:ticket47833_test.py:61 ####### [32mINFO [0m lib389:ticket47833_test.py:62 ############################################### [32mINFO [0m lib389:ticket47833_test.py:145 add entry cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com [32mINFO [0m lib389:ticket47833_test.py:146 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com [32mINFO [0m lib389:ticket47833_test.py:112 !!!!!!! cn=active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com' [32mINFO [0m lib389:ticket47833_test.py:128 ######################### MODRDN cn=xstage guy ###################### [32mINFO [0m lib389:ticket47833_test.py:112 !!!!!!! cn=active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com' | |||
Passed | tickets/ticket47869MMR_test.py::test_ticket47869_init | 14.51 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 7fba94c4-e42b-4fcf-86d5-b3e14e0ccc2a / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 
Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 69f56145-2959-42cb-b9df-87c675a85426 / got description=7fba94c4-e42b-4fcf-86d5-b3e14e0ccc2a) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47869MMR_test.py:51 Add cn=bind_entry, dc=example,dc=com [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect f7fc0cf5-828a-4f69-b7f5-6d2f15bdf969 / got description=69f56145-2959-42cb-b9df-87c675a85426) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 48f34de1-4da3-4e8d-879c-2f20dbec8a9f / got description=f7fc0cf5-828a-4f69-b7f5-6d2f15bdf969) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect a8cab283-e05b-406a-8fc5-ba7b3be51d1d / got description=48f34de1-4da3-4e8d-879c-2f20dbec8a9f) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from 
ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 1380af8f-0148-4925-975b-0a81d4575d2b / got description=a8cab283-e05b-406a-8fc5-ba7b3be51d1d) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 9bca18f7-df17-4144-8767-9fe42f6e6651 / got description=1380af8f-0148-4925-975b-0a81d4575d2b) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 820b6698-64e5-46e7-a4d9-b810eba1edb2 / got description=9bca18f7-df17-4144-8767-9fe42f6e6651) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 3bb51eb9-2a1a-4f6d-9ee9-a8baabb26452 / got description=820b6698-64e5-46e7-a4d9-b810eba1edb2) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is 
working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect ea458c93-1f1a-4bb8-b811-33f6a02c44cb / got description=3bb51eb9-2a1a-4f6d-9ee9-a8baabb26452) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect b39127ed-aa17-4b68-bd5b-ca7aa2097b4e / got description=ea458c93-1f1a-4bb8-b811-33f6a02c44cb) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect f37d5268-d423-49e2-9700-a4baaa6db4b1 / got description=b39127ed-aa17-4b68-bd5b-ca7aa2097b4e) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 87859d56-f1aa-4622-a64a-9072de06cf58 / got description=f37d5268-d423-49e2-9700-a4baaa6db4b1) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from 
ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working | |||
Passed | tickets/ticket47869MMR_test.py::test_ticket47869_check | 0.21 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47869MMR_test.py:93 ######################### CHECK nscpentrywsi ###################### [32mINFO [0m lib389:ticket47869MMR_test.py:95 ##### Master1: Bind as cn=Directory Manager ##### [32mINFO [0m lib389:ticket47869MMR_test.py:98 Master1: Calling search_ext... [32mINFO [0m lib389:ticket47869MMR_test.py:102 27 results [32mINFO [0m lib389:ticket47869MMR_test.py:104 Results: [32mINFO [0m lib389:ticket47869MMR_test.py:106 dn: dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:106 dn: ou=groups,dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:106 dn: ou=people,dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:106 dn: ou=permissions,dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:106 dn: ou=services,dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:106 dn: uid=demo_user,ou=people,dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:106 dn: cn=demo_group,ou=groups,dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:106 dn: cn=group_admin,ou=permissions,dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:106 dn: cn=group_modify,ou=permissions,dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:106 dn: cn=user_admin,ou=permissions,dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:106 dn: cn=user_modify,ou=permissions,dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:106 dn: cn=user_passwd_reset,ou=permissions,dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:106 dn: cn=user_private_read,ou=permissions,dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:106 dn: cn=replication_managers,dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:106 dn: cn=ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701,ou=services,dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:106 dn: 
cn=ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702,ou=services,dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:106 dn: cn=bind_entry,dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:106 dn: cn=test_entry0,dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:106 dn: cn=test_entry1,dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:106 dn: cn=test_entry2,dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:106 dn: cn=test_entry3,dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:106 dn: cn=test_entry4,dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:106 dn: cn=test_entry5,dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:106 dn: cn=test_entry6,dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:106 dn: cn=test_entry7,dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:106 dn: cn=test_entry8,dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:106 dn: cn=test_entry9,dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:110 Master1: count of nscpentrywsi: 27 [32mINFO [0m lib389:ticket47869MMR_test.py:112 ##### Master2: Bind as cn=Directory Manager ##### [32mINFO [0m lib389:ticket47869MMR_test.py:115 Master2: Calling search_ext... 
[32mINFO [0m lib389:ticket47869MMR_test.py:119 27 results [32mINFO [0m lib389:ticket47869MMR_test.py:121 Results: [32mINFO [0m lib389:ticket47869MMR_test.py:123 dn: dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:123 dn: ou=groups,dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:123 dn: ou=people,dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:123 dn: ou=permissions,dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:123 dn: ou=services,dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:123 dn: cn=replication_managers,dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:123 dn: cn=demo_group,ou=groups,dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:123 dn: uid=demo_user,ou=people,dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:123 dn: cn=group_admin,ou=permissions,dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:123 dn: cn=group_modify,ou=permissions,dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:123 dn: cn=user_admin,ou=permissions,dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:123 dn: cn=user_modify,ou=permissions,dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:123 dn: cn=user_passwd_reset,ou=permissions,dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:123 dn: cn=user_private_read,ou=permissions,dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:123 dn: cn=ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63701,ou=services,dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:123 dn: cn=ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:63702,ou=services,dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:123 dn: cn=bind_entry,dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:123 dn: cn=test_entry0,dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:123 dn: cn=test_entry1,dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:123 dn: 
cn=test_entry2,dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:123 dn: cn=test_entry3,dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:123 dn: cn=test_entry4,dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:123 dn: cn=test_entry5,dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:123 dn: cn=test_entry6,dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:123 dn: cn=test_entry7,dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:123 dn: cn=test_entry8,dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:123 dn: cn=test_entry9,dc=example,dc=com [32mINFO [0m lib389:ticket47869MMR_test.py:127 Master2: count of nscpentrywsi: 27 [32mINFO [0m lib389:ticket47869MMR_test.py:130 ##### Master1: Bind as cn=bind_entry, dc=example,dc=com ##### [32mINFO [0m lib389:ticket47869MMR_test.py:133 Master1: Calling search_ext... [32mINFO [0m lib389:ticket47869MMR_test.py:137 27 results [32mINFO [0m lib389:ticket47869MMR_test.py:143 Master1: count of nscpentrywsi: 0 [32mINFO [0m lib389:ticket47869MMR_test.py:146 ##### Master2: Bind as cn=bind_entry, dc=example,dc=com ##### [32mINFO [0m lib389:ticket47869MMR_test.py:149 Master2: Calling search_ext... [32mINFO [0m lib389:ticket47869MMR_test.py:153 27 results [32mINFO [0m lib389:ticket47869MMR_test.py:159 Master2: count of nscpentrywsi: 0 [32mINFO [0m lib389:ticket47869MMR_test.py:162 ##### Master1: Bind as anonymous ##### [32mINFO [0m lib389:ticket47869MMR_test.py:165 Master1: Calling search_ext... [32mINFO [0m lib389:ticket47869MMR_test.py:169 27 results [32mINFO [0m lib389:ticket47869MMR_test.py:175 Master1: count of nscpentrywsi: 0 [32mINFO [0m lib389:ticket47869MMR_test.py:178 ##### Master2: Bind as anonymous ##### [32mINFO [0m lib389:ticket47869MMR_test.py:181 Master2: Calling search_ext... 
[32mINFO [0m lib389:ticket47869MMR_test.py:185 27 results [32mINFO [0m lib389:ticket47869MMR_test.py:191 Master2: count of nscpentrywsi: 0 [32mINFO [0m lib389:ticket47869MMR_test.py:193 ##### ticket47869 was successfully verified. ##### | |||
Passed | tickets/ticket47871_test.py::test_ticket47871_init | 3.77 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for consumer1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39201, 'ldap-secureport': 63901, 'server-id': 'consumer1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:169 Joining consumer consumer1 from master1 ... [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect c058808a-8cc1-4e72-b4f4-792ac1bd9cc5 / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is working [32mINFO [0m lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 [32mINFO [0m 
lib389.topologies:topologies.py:174 Ensuring consumer consumer1 from master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 already exists -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47871_test.py:53 test_ticket47871_init topology_m1c1 <lib389.topologies.TopologyMain object at 0x7faa54615af0> | |||
Passed | tickets/ticket47871_test.py::test_ticket47871_1 | 1.22 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47871_test.py:71 test_ticket47871_init: 10 entries ADDed other_entry[0..9] [32mINFO [0m lib389:ticket47871_test.py:78 Added entries are [32mINFO [0m lib389:ticket47871_test.py:80 changenumber=1,cn=changelog [32mINFO [0m lib389:ticket47871_test.py:80 changenumber=2,cn=changelog [32mINFO [0m lib389:ticket47871_test.py:80 changenumber=3,cn=changelog [32mINFO [0m lib389:ticket47871_test.py:80 changenumber=4,cn=changelog [32mINFO [0m lib389:ticket47871_test.py:80 changenumber=5,cn=changelog [32mINFO [0m lib389:ticket47871_test.py:80 changenumber=6,cn=changelog [32mINFO [0m lib389:ticket47871_test.py:80 changenumber=7,cn=changelog [32mINFO [0m lib389:ticket47871_test.py:80 changenumber=8,cn=changelog [32mINFO [0m lib389:ticket47871_test.py:80 changenumber=9,cn=changelog [32mINFO [0m lib389:ticket47871_test.py:80 changenumber=10,cn=changelog | |||
Passed | tickets/ticket47871_test.py::test_ticket47871_2 | 12.03 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47871_test.py:93 Try no 1 it remains 10 entries [32mINFO [0m lib389:ticket47871_test.py:95 changenumber=1,cn=changelog [32mINFO [0m lib389:ticket47871_test.py:95 changenumber=2,cn=changelog [32mINFO [0m lib389:ticket47871_test.py:95 changenumber=3,cn=changelog [32mINFO [0m lib389:ticket47871_test.py:95 changenumber=4,cn=changelog [32mINFO [0m lib389:ticket47871_test.py:95 changenumber=5,cn=changelog [32mINFO [0m lib389:ticket47871_test.py:95 changenumber=6,cn=changelog [32mINFO [0m lib389:ticket47871_test.py:95 changenumber=7,cn=changelog [32mINFO [0m lib389:ticket47871_test.py:95 changenumber=8,cn=changelog [32mINFO [0m lib389:ticket47871_test.py:95 changenumber=9,cn=changelog [32mINFO [0m lib389:ticket47871_test.py:95 changenumber=10,cn=changelog [32mINFO [0m lib389:ticket47871_test.py:93 Try no 2 it remains 1 entries [32mINFO [0m lib389:ticket47871_test.py:95 changenumber=10,cn=changelog | |||
Passed | tickets/ticket47900_test.py::test_ticket47900 | 1.50 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47900_test.py:52 Creating Password Administator entry cn=passwd_admin,dc=example,dc=com... [32mINFO [0m lib389:ticket47900_test.py:62 Configuring password policy... [32mINFO [0m lib389:ticket47900_test.py:74 Add aci to allow password admin to add/update entries... [32mINFO [0m lib389:ticket47900_test.py:87 Bind as the Password Administator (before activating)... [32mINFO [0m lib389:ticket47900_test.py:101 Attempt to add entries with invalid passwords, these adds should fail... [32mINFO [0m lib389:ticket47900_test.py:105 Create a regular user entry cn=Joe Schmo,dc=example,dc=com with password (2_Short)... [32mINFO [0m lib389:ticket47900_test.py:111 Add failed as expected: password (2_Short) result (Constraint violation) [32mINFO [0m lib389:ticket47900_test.py:105 Create a regular user entry cn=Joe Schmo,dc=example,dc=com with password (No_Number)... [32mINFO [0m lib389:ticket47900_test.py:111 Add failed as expected: password (No_Number) result (Constraint violation) [32mINFO [0m lib389:ticket47900_test.py:105 Create a regular user entry cn=Joe Schmo,dc=example,dc=com with password (N0Special)... [32mINFO [0m lib389:ticket47900_test.py:111 Add failed as expected: password (N0Special) result (Constraint violation) [32mINFO [0m lib389:ticket47900_test.py:105 Create a regular user entry cn=Joe Schmo,dc=example,dc=com with password ({SSHA}bBy8UdtPZwu8uZna9QOYG3Pr41RpIRVDl8wddw==)... 
[32mINFO [0m lib389:ticket47900_test.py:111 Add failed as expected: password ({SSHA}bBy8UdtPZwu8uZna9QOYG3Pr41RpIRVDl8wddw==) result (Constraint violation) [32mINFO [0m lib389:ticket47900_test.py:123 Activate the Password Administator... [32mINFO [0m lib389:ticket47900_test.py:139 Create a regular user entry cn=Joe Schmo,dc=example,dc=com with password (2_Short)... [32mINFO [0m lib389:ticket47900_test.py:142 Succesfully added entry (cn=Joe Schmo,dc=example,dc=com) [32mINFO [0m lib389:ticket47900_test.py:139 Create a regular user entry cn=Joe Schmo,dc=example,dc=com with password (No_Number)... [32mINFO [0m lib389:ticket47900_test.py:142 Succesfully added entry (cn=Joe Schmo,dc=example,dc=com) [32mINFO [0m lib389:ticket47900_test.py:139 Create a regular user entry cn=Joe Schmo,dc=example,dc=com with password (N0Special)... [32mINFO [0m lib389:ticket47900_test.py:142 Succesfully added entry (cn=Joe Schmo,dc=example,dc=com) [32mINFO [0m lib389:ticket47900_test.py:139 Create a regular user entry cn=Joe Schmo,dc=example,dc=com with password ({SSHA}bBy8UdtPZwu8uZna9QOYG3Pr41RpIRVDl8wddw==)... [32mINFO [0m lib389:ticket47900_test.py:142 Succesfully added entry (cn=Joe Schmo,dc=example,dc=com) [32mINFO [0m lib389:ticket47900_test.py:155 Deactivate Password Administator and try invalid password updates... [32mINFO [0m lib389:ticket47900_test.py:177 Password update failed as expected: password (2_Short) result (Constraint violation) [32mINFO [0m lib389:ticket47900_test.py:177 Password update failed as expected: password (No_Number) result (Constraint violation) [32mINFO [0m lib389:ticket47900_test.py:177 Password update failed as expected: password (N0Special) result (Constraint violation) [32mINFO [0m lib389:ticket47900_test.py:177 Password update failed as expected: password ({SSHA}bBy8UdtPZwu8uZna9QOYG3Pr41RpIRVDl8wddw==) result (Constraint violation) [32mINFO [0m lib389:ticket47900_test.py:188 Activate Password Administator and try updates again... 
[32mINFO [0m lib389:ticket47900_test.py:205 Password update succeeded (2_Short) [32mINFO [0m lib389:ticket47900_test.py:205 Password update succeeded (No_Number) [32mINFO [0m lib389:ticket47900_test.py:205 Password update succeeded (N0Special) [32mINFO [0m lib389:ticket47900_test.py:205 Password update succeeded ({SSHA}bBy8UdtPZwu8uZna9QOYG3Pr41RpIRVDl8wddw==) | |||
Passed | tickets/ticket47910_test.py::test_ticket47910_logconv_start_end_positive | 0.76 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.utils:ticket47910_test.py:36 Diable access log buffering [32mINFO [0m lib389.utils:ticket47910_test.py:39 Do a ldapsearch operation [32mINFO [0m lib389.utils:ticket47910_test.py:42 sleep for sometime so that access log file get generated -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.utils:ticket47910_test.py:79 Running test_ticket47910 - Execute logconv.pl -S -E with random values [32mINFO [0m lib389.utils:ticket47910_test.py:81 taking current time with offset of 2 mins and formatting it to feed -S [32mINFO [0m lib389.utils:ticket47910_test.py:85 taking current time with offset of 2 mins and formatting it to feed -E [32mINFO [0m lib389.utils:ticket47910_test.py:89 Executing logconv.pl with -S and -E [32mINFO [0m lib389.utils:ticket47910_test.py:61 Executing logconv.pl with -S current time and -E end time [32mINFO [0m lib389.utils:ticket47910_test.py:63 /usr/bin/logconv.pl -S [09/Nov/2020:23:39:42] -E [09/Nov/2020:23:43:42] /var/log/dirsrv/slapd-standalone1/access [32mINFO [0m lib389.utils:ticket47910_test.py:66 standard outputAccess Log Analyzer 8.2 Command: logconv.pl /var/log/dirsrv/slapd-standalone1/access Processing 1 Access Log(s)... 
[001] /var/log/dirsrv/slapd-standalone1/access size (bytes): 9325 Total Log Lines Analysed: 77 ----------- Access Log Output ------------ Start of Logs: 09/Nov/2020:23:39:42 End of Logs: 09/Nov/2020:23:41:40.954260478 Processed Log Time: 0 Hours, 1 Minutes, 58.954262528 Seconds Restarts: 2 Peak Concurrent Connections: 1 Total Operations: 35 Total Results: 33 Overall Performance: 94.3% Total Connections: 3 (0.03/sec) (1.51/min) - LDAP Connections: 1 (0.01/sec) (0.50/min) - LDAPI Connections: 2 (0.02/sec) (1.01/min) - LDAPS Connections: 0 (0.00/sec) (0.00/min) - StartTLS Extended Ops: 0 (0.00/sec) (0.00/min) Searches: 8 (0.07/sec) (4.04/min) Modifications: 4 (0.03/sec) (2.02/min) Adds: 18 (0.15/sec) (9.08/min) Deletes: 0 (0.00/sec) (0.00/min) Mod RDNs: 0 (0.00/sec) (0.00/min) Compares: 0 (0.00/sec) (0.00/min) Binds: 5 (0.04/sec) (2.52/min) Average wtime (wait time): 0.000260434 Average optime (op time): 0.014920374 Average etime (elapsed time): 0.015173701 Proxied Auth Operations: 0 Persistent Searches: 0 Internal Operations: 0 Entry Operations: 0 Extended Operations: 0 Abandoned Requests: 0 Smart Referrals Received: 0 VLV Operations: 0 VLV Unindexed Searches: 0 VLV Unindexed Components: 0 SORT Operations: 0 Entire Search Base Queries: 1 Paged Searches: 0 Unindexed Searches: 0 Unindexed Components: 1 Invalid Attribute Filters: 0 FDs Taken: 3 FDs Returned: 2 Highest FD Taken: 65 Broken Pipes: 0 Connections Reset By Peer: 0 Resource Unavailable: 0 Max BER Size Exceeded: 0 Binds: 5 Unbinds: 1 -------------------------------- - LDAP v2 Binds: 0 - LDAP v3 Binds: 3 - AUTOBINDs(LDAPI): 2 - SSL Client Binds: 0 - Failed SSL Client Binds: 0 - SASL Binds: 2 - EXTERNAL: 2 - Directory Manager Binds: 1 - Anonymous Binds: 0 Cleaning up temp files... Done. [32mINFO [0m lib389.utils:ticket47910_test.py:67 standard errors | |||
Passed | tickets/ticket47910_test.py::test_ticket47910_logconv_start_end_negative | 0.23 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.utils:ticket47910_test.py:105 Running test_ticket47910 - Execute logconv.pl -S -E with starttime>endtime [32mINFO [0m lib389.utils:ticket47910_test.py:107 taking current time with offset of 2 mins and formatting it to feed -S [32mINFO [0m lib389.utils:ticket47910_test.py:111 taking current time with offset of 2 mins and formatting it to feed -E [32mINFO [0m lib389.utils:ticket47910_test.py:115 Executing logconv.pl with -S and -E [32mINFO [0m lib389.utils:ticket47910_test.py:61 Executing logconv.pl with -S current time and -E end time [32mINFO [0m lib389.utils:ticket47910_test.py:63 /usr/bin/logconv.pl -S [09/Nov/2020:23:43:43] -E [09/Nov/2020:23:39:43] /var/log/dirsrv/slapd-standalone1/access [32mINFO [0m lib389.utils:ticket47910_test.py:66 standard outputAccess Log Analyzer 8.2 Command: logconv.pl /var/log/dirsrv/slapd-standalone1/access Start time ([09/Nov/2020:23:43:43]) is greater than end time ([09/Nov/2020:23:39:43])! Cleaning up temp files... Done. [32mINFO [0m lib389.utils:ticket47910_test.py:67 standard errors | |||
Passed | tickets/ticket47910_test.py::test_ticket47910_logconv_start_end_invalid | 0.23 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.utils:ticket47910_test.py:128 Running test_ticket47910 - Execute logconv.pl -S -E with invalid timestamp [32mINFO [0m lib389.utils:ticket47910_test.py:129 Set start time and end time to invalid values [32mINFO [0m lib389.utils:ticket47910_test.py:133 Executing logconv.pl with -S and -E [32mINFO [0m lib389.utils:ticket47910_test.py:61 Executing logconv.pl with -S current time and -E end time [32mINFO [0m lib389.utils:ticket47910_test.py:63 /usr/bin/logconv.pl -S invalid -E invalid /var/log/dirsrv/slapd-standalone1/access [32mINFO [0m lib389.utils:ticket47910_test.py:66 standard outputAccess Log Analyzer 8.2 Command: logconv.pl /var/log/dirsrv/slapd-standalone1/access The date string (invalid) is invalid, exiting... Cleaning up temp files... Done. [32mINFO [0m lib389.utils:ticket47910_test.py:67 standard errors | |||
Passed | tickets/ticket47910_test.py::test_ticket47910_logconv_noaccesslogs | 0.19 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.utils:ticket47910_test.py:147 Running test_ticket47910 - Execute logconv.pl without access logs [32mINFO [0m lib389.utils:ticket47910_test.py:149 taking current time with offset of 2 mins and formatting it to feed -S [32mINFO [0m lib389.utils:ticket47910_test.py:152 Executing logconv.pl with -S current time [32mINFO [0m lib389.utils:ticket47910_test.py:154 /usr/bin/logconv.pl -S [09/Nov/2020:23:39:43] [32mINFO [0m lib389.utils:ticket47910_test.py:157 standard outputThere are no access logs specified, or the tool options have not been used correctly! Cleaning up temp files... Done. [32mINFO [0m lib389.utils:ticket47910_test.py:158 standard errors | |||
Passed | tickets/ticket47920_test.py::test_ticket47920_init | 0.30 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | tickets/ticket47920_test.py::test_ticket47920_mod_readentry_ctrl | 0.01 | |
------------------------------Captured stdout call------------------------------ ['final description'] -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket47920_test.py:65 ############################################### [32mINFO [0m lib389:ticket47920_test.py:66 ####### [32mINFO [0m lib389:ticket47920_test.py:67 ####### MOD: with a readentry control [32mINFO [0m lib389:ticket47920_test.py:68 ####### [32mINFO [0m lib389:ticket47920_test.py:69 ############################################### [32mINFO [0m lib389:ticket47920_test.py:106 Check the initial value of the entry | |||
Passed | tickets/ticket47921_test.py::test_ticket47921 | 0.35 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket47921_test:ticket47921_test.py:81 Test complete | |||
Passed | tickets/ticket47927_test.py::test_ticket47927_init | 5.00 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | tickets/ticket47927_test.py::test_ticket47927_one | 0.06 | |
-------------------------------Captured log call-------------------------------- [31mCRITICAL[0m tests.tickets.ticket47927_test:ticket47927_test.py:100 test_ticket47927_one: Failed (expected) to set the telephonenumber for cn=test_2,cn=enforced_container,dc=example,dc=com: Constraint violation [31mCRITICAL[0m tests.tickets.ticket47927_test:ticket47927_test.py:111 test_ticket47927_one: Failed (expected) to set the telephonenumber for cn=test_3,cn=excluded_container,dc=example,dc=com: Constraint violation | |||
Passed | tickets/ticket47927_test.py::test_ticket47927_two | 4.83 | |
No log output captured. | |||
Passed | tickets/ticket47927_test.py::test_ticket47927_three | 0.05 | |
-------------------------------Captured log call-------------------------------- [31mCRITICAL[0m tests.tickets.ticket47927_test:ticket47927_test.py:151 test_ticket47927_three: Failed (expected) to set the telephonenumber for cn=test_2,cn=enforced_container,dc=example,dc=com: Constraint violation [31mCRITICAL[0m tests.tickets.ticket47927_test:ticket47927_test.py:158 test_ticket47927_three: success to set the telephonenumber for cn=test_3,cn=excluded_container,dc=example,dc=com | |||
Passed | tickets/ticket47927_test.py::test_ticket47927_four | 0.02 | |
-------------------------------Captured log call-------------------------------- [31mCRITICAL[0m tests.tickets.ticket47927_test:ticket47927_test.py:176 test_ticket47927_four: success to set the telephonenumber for cn=test_3,cn=excluded_container,dc=example,dc=com [31mCRITICAL[0m tests.tickets.ticket47927_test:ticket47927_test.py:198 test_ticket47927_four: Failed (expected) to set the telephonenumber for cn=test_2,cn=enforced_container,dc=example,dc=com: Constraint violation | |||
Passed | tickets/ticket47927_test.py::test_ticket47927_five | 4.56 | |
No log output captured. | |||
Passed | tickets/ticket47927_test.py::test_ticket47927_six | 0.06 | |
-------------------------------Captured log call-------------------------------- [31mCRITICAL[0m tests.tickets.ticket47927_test:ticket47927_test.py:240 test_ticket47927_six: Failed (expected) to set the telephonenumber for cn=test_2,cn=enforced_container,dc=example,dc=com: Constraint violation [31mCRITICAL[0m tests.tickets.ticket47927_test:ticket47927_test.py:247 test_ticket47927_six: success to set the telephonenumber for cn=test_3,cn=excluded_container,dc=example,dc=com [31mCRITICAL[0m tests.tickets.ticket47927_test:ticket47927_test.py:256 test_ticket47927_six: success to set the telephonenumber for cn=test_4,cn=excluded_bis_container,dc=example,dc=com | |||
Passed | tickets/ticket47931_test.py::test_ticket47931 | 69.07 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. ------------------------------Captured stderr call------------------------------ Exception in thread Thread-71: Traceback (most recent call last): File "/usr/lib64/python3.8/threading.py", line 932, in _bootstrap_inner self.run() File "/export/tests/tickets/ticket47931_test.py", line 36, in run conn.set_option(ldap.OPT_TIMEOUT, self.timeout) File "/usr/local/lib/python3.8/site-packages/lib389/__init__.py", line 180, in inner return f(*args, **kwargs) File "/usr/lib64/python3.8/site-packages/ldap/ldapobject.py", line 937, in set_option return self._ldap_call(self._l.set_option,option,invalue) File "/usr/lib64/python3.8/site-packages/ldap/ldapobject.py", line 362, in __getattr__ raise AttributeError('%s has no attribute %s' % ( AttributeError: DirSrv has no attribute '_l' -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:backend.py:80 List backend with suffix=dc=deadlock [32mINFO [0m lib389:backend.py:290 Creating a local backend [32mINFO [0m lib389:backend.py:76 List backend cn=deadlock,cn=ldbm database,cn=plugins,cn=config [32mINFO [0m lib389:__init__.py:1713 Found entry dn: cn=deadlock,cn=ldbm database,cn=plugins,cn=config cn: deadlock nsslapd-cachememsize: 512000 nsslapd-cachesize: -1 nsslapd-directory: /var/lib/dirsrv/slapd-standalone1/db/deadlock nsslapd-dncachememsize: 16777216 nsslapd-readonly: off nsslapd-require-index: off nsslapd-require-internalop-index: off nsslapd-suffix: dc=deadlock objectClass: top objectClass: extensibleObject objectClass: nsBackendInstance [32mINFO [0m 
lib389:mappingTree.py:154 Entry dn: cn="dc=deadlock",cn=mapping tree,cn=config cn: dc=deadlock nsslapd-backend: deadlock nsslapd-state: backend objectclass: top objectclass: extensibleObject objectclass: nsMappingTree [32mINFO [0m lib389:__init__.py:1713 Found entry dn: cn=dc\3Ddeadlock,cn=mapping tree,cn=config cn: dc=deadlock nsslapd-backend: deadlock nsslapd-state: backend objectClass: top objectClass: extensibleObject objectClass: nsMappingTree [32mINFO [0m tests.tickets.ticket47931_test:ticket47931_test.py:142 Adding members to the group... [32mINFO [0m tests.tickets.ticket47931_test:ticket47931_test.py:158 Finished adding members to the group. [32mINFO [0m tests.tickets.ticket47931_test:ticket47931_test.py:164 Test complete | |||
Passed | tickets/ticket47953_test.py::test_ticket47953 | 5.09 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:tasks.py:498 Import task import_11092020_234419 for file /var/lib/dirsrv/slapd-standalone1/ldif/ticket47953.ldif completed successfully | |||
Passed | tickets/ticket47963_test.py::test_ticket47963 | 8.14 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket47963_test:ticket47963_test.py:145 Test complete | |||
Passed | tickets/ticket47970_test.py::test_ticket47970 | 0.28 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | tickets/ticket47976_test.py::test_ticket47976_init | 4.68 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | tickets/ticket47976_test.py::test_ticket47976_1 | 3.98 | |
No log output captured. | |||
Passed | tickets/ticket47976_test.py::test_ticket47976_2 | 5.15 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket47976_test:ticket47976_test.py:99 Test complete [32mINFO [0m tests.tickets.ticket47976_test:ticket47976_test.py:104 Export LDIF file... [32mINFO [0m lib389:tasks.py:567 Export task export_11092020_234523 for file /var/lib/dirsrv/slapd-standalone1/ldif/export.ldif completed successfully [32mINFO [0m tests.tickets.ticket47976_test:ticket47976_test.py:115 Import LDIF file... [32mINFO [0m lib389:tasks.py:498 Import task import_11092020_234525 for file /var/lib/dirsrv/slapd-standalone1/ldif/export.ldif completed successfully | |||
Passed | tickets/ticket47976_test.py::test_ticket47976_3 | 0.02 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket47976_test:ticket47976_test.py:131 Testing if the delete will hang or not [32mINFO [0m tests.tickets.ticket47976_test:ticket47976_test.py:150 user0 was correctly deleted [32mINFO [0m tests.tickets.ticket47976_test:ticket47976_test.py:150 user1 was correctly deleted | |||
Passed | tickets/ticket47980_test.py::test_ticket47980 | 2.67 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | tickets/ticket47981_test.py::test_ticket47981 | 3.67 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:backend.py:80 List backend with suffix=o=netscaperoot [32mINFO [0m lib389:backend.py:290 Creating a local backend [32mINFO [0m lib389:backend.py:76 List backend cn=netscaperoot,cn=ldbm database,cn=plugins,cn=config [32mINFO [0m lib389:__init__.py:1713 Found entry dn: cn=netscaperoot,cn=ldbm database,cn=plugins,cn=config cn: netscaperoot nsslapd-cachememsize: 512000 nsslapd-cachesize: -1 nsslapd-directory: /var/lib/dirsrv/slapd-standalone1/db/netscaperoot nsslapd-dncachememsize: 16777216 nsslapd-readonly: off nsslapd-require-index: off nsslapd-require-internalop-index: off nsslapd-suffix: o=netscaperoot objectClass: top objectClass: extensibleObject objectClass: nsBackendInstance [32mINFO [0m lib389:mappingTree.py:154 Entry dn: cn="o=netscaperoot",cn=mapping tree,cn=config cn: o=netscaperoot nsslapd-backend: netscaperoot nsslapd-state: backend objectclass: top objectclass: extensibleObject objectclass: nsMappingTree [32mINFO [0m lib389:__init__.py:1713 Found entry dn: cn=o\3Dnetscaperoot,cn=mapping tree,cn=config cn: o=netscaperoot nsslapd-backend: netscaperoot nsslapd-state: backend objectClass: top objectClass: extensibleObject objectClass: nsMappingTree | |||
Passed | tickets/ticket48005_test.py::test_ticket48005_memberof | 14.29 | |
------------------------------Captured stderr call------------------------------ ls: cannot access '/var/log/dirsrv/slapd-standalone1/core*': No such file or directory -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket48005_test:ticket48005_test.py:86 Ticket 48005 memberof test... [32mINFO [0m lib389:tasks.py:877 fixupMemberOf task fixupmemberof_11092020_234710 for basedn dc=example,dc=com completed successfully [32mINFO [0m tests.tickets.ticket48005_test:ticket48005_test.py:110 No core files are found [32mINFO [0m tests.tickets.ticket48005_test:ticket48005_test.py:119 Ticket 48005 memberof test complete | |||
Passed | tickets/ticket48005_test.py::test_ticket48005_automember | 22.55 | |
------------------------------Captured stderr call------------------------------ ls: cannot access '/var/log/dirsrv/slapd-standalone1/core*': No such file or directory ls: cannot access '/var/log/dirsrv/slapd-standalone1/core*': No such file or directory ls: cannot access '/var/log/dirsrv/slapd-standalone1/core*': No such file or directory -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket48005_test:ticket48005_test.py:138 Ticket 48005 automember test... [32mINFO [0m tests.tickets.ticket48005_test:ticket48005_test.py:143 Adding automember config [32mINFO [0m lib389:tasks.py:986 Automember Rebuild Membership task(task-11092020_234724) completedsuccessfully [32mINFO [0m tests.tickets.ticket48005_test:ticket48005_test.py:176 No core files are found [32mINFO [0m lib389:tasks.py:1039 Automember Export Updates task (task-11092020_234728) completed successfully [32mINFO [0m tests.tickets.ticket48005_test:ticket48005_test.py:198 No core files are found [32mINFO [0m lib389:tasks.py:1087 Automember Map Updates task (task-11092020_234732) completed successfully [32mINFO [0m tests.tickets.ticket48005_test:ticket48005_test.py:222 No core files are found [32mINFO [0m tests.tickets.ticket48005_test:ticket48005_test.py:231 Ticket 48005 automember test complete | |||
Passed | tickets/ticket48005_test.py::test_ticket48005_syntaxvalidate | 3.96 | |
------------------------------Captured stderr call------------------------------ ls: cannot access '/var/log/dirsrv/slapd-standalone1/core*': No such file or directory -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket48005_test:ticket48005_test.py:241 Ticket 48005 syntax validate test... [32mINFO [0m lib389:tasks.py:1255 Syntax Validate task (task-11092020_234742) completed successfully [32mINFO [0m tests.tickets.ticket48005_test:ticket48005_test.py:261 No core files are found [32mINFO [0m tests.tickets.ticket48005_test:ticket48005_test.py:265 Ticket 48005 syntax validate test complete | |||
Passed | tickets/ticket48005_test.py::test_ticket48005_usn | 15.68 | |
------------------------------Captured stderr call------------------------------ ls: cannot access '/var/log/dirsrv/slapd-standalone1/core*': No such file or directory -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket48005_test:ticket48005_test.py:277 Ticket 48005 usn test... [32mINFO [0m tests.tickets.ticket48005_test:ticket48005_test.py:285 No user entries. [32mINFO [0m lib389:tasks.py:1304 USN tombstone cleanup task (task-11092020_234751) completed successfully [32mINFO [0m tests.tickets.ticket48005_test:ticket48005_test.py:316 No core files are found [32mINFO [0m tests.tickets.ticket48005_test:ticket48005_test.py:324 Ticket 48005 usn test complete | |||
Passed | tickets/ticket48005_test.py::test_ticket48005_schemareload | 3.87 | |
------------------------------Captured stderr call------------------------------ ls: cannot access '/var/log/dirsrv/slapd-standalone1/core*': No such file or directory -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket48005_test:ticket48005_test.py:334 Ticket 48005 schema reload test... [32mINFO [0m lib389:tasks.py:1169 Schema Reload task (task-11092020_234802) completed successfully [32mINFO [0m tests.tickets.ticket48005_test:ticket48005_test.py:354 No core files are found [32mINFO [0m tests.tickets.ticket48005_test:ticket48005_test.py:358 Ticket 48005 schema reload test complete | |||
Passed | tickets/ticket48026_test.py::test_ticket48026 | 5.50 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket48026_test:ticket48026_test.py:114 Test complete | |||
Passed | tickets/ticket48109_test.py::test_ticket48109 | 30.75 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket48109_test:ticket48109_test.py:32 Test case 0 [32mINFO [0m tests.tickets.ticket48109_test:ticket48109_test.py:77 match: conn=1 op=3 [32mINFO [0m tests.tickets.ticket48109_test:ticket48109_test.py:85 l1: [09/Nov/2020:23:49:00.448910567 -0500] conn=1 op=3 RESULT err=0 tag=101 nentries=1 wtime=0.000599571 optime=0.000643794 etime=0.001237787 [32mINFO [0m tests.tickets.ticket48109_test:ticket48109_test.py:88 match: nentires=1 [32mINFO [0m tests.tickets.ticket48109_test:ticket48109_test.py:93 Entry uid=a* found. [32mINFO [0m tests.tickets.ticket48109_test:ticket48109_test.py:100 Test case 0 - OK - substr index used [32mINFO [0m tests.tickets.ticket48109_test:ticket48109_test.py:119 Test case 1 [32mINFO [0m tests.tickets.ticket48109_test:ticket48109_test.py:163 match: conn=1 op=3 [32mINFO [0m tests.tickets.ticket48109_test:ticket48109_test.py:171 l1: [09/Nov/2020:23:49:00.448910567 -0500] conn=1 op=3 RESULT err=0 tag=101 nentries=1 wtime=0.000599571 optime=0.000643794 etime=0.001237787 [32mINFO [0m tests.tickets.ticket48109_test:ticket48109_test.py:174 match: nentires=1 [32mINFO [0m tests.tickets.ticket48109_test:ticket48109_test.py:179 Entry uid=*b found. 
[32mINFO [0m tests.tickets.ticket48109_test:ticket48109_test.py:186 Test case 1 - OK - substr index used [32mINFO [0m tests.tickets.ticket48109_test:ticket48109_test.py:208 Test case 2 [32mINFO [0m tests.tickets.ticket48109_test:ticket48109_test.py:259 match: conn=1 op=3 [32mINFO [0m tests.tickets.ticket48109_test:ticket48109_test.py:267 l1: [09/Nov/2020:23:49:00.448910567 -0500] conn=1 op=3 RESULT err=0 tag=101 nentries=1 wtime=0.000599571 optime=0.000643794 etime=0.001237787 [32mINFO [0m tests.tickets.ticket48109_test:ticket48109_test.py:270 match: nentires=1 [32mINFO [0m tests.tickets.ticket48109_test:ticket48109_test.py:275 Entry uid=c* found. [32mINFO [0m tests.tickets.ticket48109_test:ticket48109_test.py:282 Test case 2-1 - OK - correct substr index used [32mINFO [0m tests.tickets.ticket48109_test:ticket48109_test.py:294 match: conn=1 op=4 [32mINFO [0m tests.tickets.ticket48109_test:ticket48109_test.py:302 l1: [09/Nov/2020:23:49:21.109017772 -0500] conn=1 op=4 RESULT err=0 tag=101 nentries=1 wtime=0.000261344 optime=0.000443213 etime=0.000701054 [32mINFO [0m tests.tickets.ticket48109_test:ticket48109_test.py:305 match: nentires=1 [32mINFO [0m tests.tickets.ticket48109_test:ticket48109_test.py:310 Entry uid=*2 found. [32mINFO [0m tests.tickets.ticket48109_test:ticket48109_test.py:317 Test case 2-2 - OK - correct substr index used [32mINFO [0m tests.tickets.ticket48109_test:ticket48109_test.py:331 Testcase PASSED | |||
Passed | tickets/ticket48170_test.py::test_ticket48170 | 0.01 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket48170_test:ticket48170_test.py:29 Index update correctly rejected [32mINFO [0m tests.tickets.ticket48170_test:ticket48170_test.py:36 Test complete | |||
Passed | tickets/ticket48194_test.py::test_init | 7.86 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket48194_test.py:40 ############################################### [32mINFO [0m lib389:ticket48194_test.py:41 ####### Testing Ticket 48194 - harden the list of ciphers available by default [32mINFO [0m lib389:ticket48194_test.py:42 ############################################### [32mINFO [0m lib389.utils:ticket48194_test.py:57 ######################### enable SSL in the directory server with all ciphers ###################### | |||
Passed | tickets/ticket48194_test.py::test_run_0 | 6.81 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket48194_test.py:40 ############################################### [32mINFO [0m lib389:ticket48194_test.py:41 ####### Test Case 1 - Check the ciphers availability for "+all"; allowWeakCipher: on [32mINFO [0m lib389:ticket48194_test.py:42 ############################################### [32mINFO [0m lib389.utils:ticket48194_test.py:131 ######################### Restarting the server ###################### [32mINFO [0m lib389.utils:ticket48194_test.py:86 Testing DES-CBC3-SHA -- expect to handshake successfully [32mINFO [0m lib389.utils:ticket48194_test.py:92 Running cmdline: /usr/bin/openssl s_client -connect localhost:63601 -cipher DES-CBC3-SHA [32mINFO [0m lib389.utils:ticket48194_test.py:105 Found: b'New, TLSv1.3, Cipher is TLS_AES_128_GCM_SHA256\n' [32mINFO [0m lib389.utils:ticket48194_test.py:86 Testing AES256-SHA256 -- expect to handshake successfully [32mINFO [0m lib389.utils:ticket48194_test.py:92 Running cmdline: /usr/bin/openssl s_client -connect localhost:63601 -cipher AES256-SHA256 [32mINFO [0m lib389.utils:ticket48194_test.py:105 Found: b'New, TLSv1.3, Cipher is TLS_AES_128_GCM_SHA256\n' | |||
Passed | tickets/ticket48194_test.py::test_run_3 | 6.15 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket48194_test.py:40 ############################################### [32mINFO [0m lib389:ticket48194_test.py:41 ####### Test Case 4 - Check the ciphers availability for "-all" [32mINFO [0m lib389:ticket48194_test.py:42 ############################################### [32mINFO [0m lib389.utils:ticket48194_test.py:199 ######################### Restarting the server ###################### [32mINFO [0m lib389.utils:ticket48194_test.py:86 Testing DES-CBC3-SHA -- expect to handshake failed [32mINFO [0m lib389.utils:ticket48194_test.py:92 Running cmdline: /usr/bin/openssl s_client -connect localhost:63601 -cipher DES-CBC3-SHA [32mINFO [0m lib389.utils:ticket48194_test.py:105 Found: b'New, (NONE), Cipher is (NONE)\n' [32mINFO [0m lib389.utils:ticket48194_test.py:86 Testing AES256-SHA256 -- expect to handshake failed [32mINFO [0m lib389.utils:ticket48194_test.py:92 Running cmdline: /usr/bin/openssl s_client -connect localhost:63601 -cipher AES256-SHA256 [32mINFO [0m lib389.utils:ticket48194_test.py:105 Found: b'New, (NONE), Cipher is (NONE)\n' | |||
Passed | tickets/ticket48194_test.py::test_run_9 | 7.89 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket48194_test.py:40 ############################################### [32mINFO [0m lib389:ticket48194_test.py:41 ####### Test Case 10 - Check no nsSSL3Ciphers (default setting) with no errorlog-level & allowWeakCipher on [32mINFO [0m lib389:ticket48194_test.py:42 ############################################### [32mINFO [0m lib389.utils:ticket48194_test.py:316 ######################### Restarting the server ###################### [32mINFO [0m lib389.utils:ticket48194_test.py:86 Testing DES-CBC3-SHA -- expect to handshake successfully [32mINFO [0m lib389.utils:ticket48194_test.py:92 Running cmdline: /usr/bin/openssl s_client -connect localhost:63601 -cipher DES-CBC3-SHA [32mINFO [0m lib389.utils:ticket48194_test.py:105 Found: b'New, TLSv1.3, Cipher is TLS_AES_128_GCM_SHA256\n' [32mINFO [0m lib389.utils:ticket48194_test.py:86 Testing AES256-SHA256 -- expect to handshake successfully [32mINFO [0m lib389.utils:ticket48194_test.py:92 Running cmdline: /usr/bin/openssl s_client -connect localhost:63601 -cipher AES256-SHA256 [32mINFO [0m lib389.utils:ticket48194_test.py:105 Found: b'New, TLSv1.3, Cipher is TLS_AES_128_GCM_SHA256\n' | |||
Passed | tickets/ticket48194_test.py::test_run_11 | 6.41 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket48194_test.py:40 ############################################### [32mINFO [0m lib389:ticket48194_test.py:41 ####### Test Case 12 - Check nsSSL3Ciphers: +fortezza, which is not supported [32mINFO [0m lib389:ticket48194_test.py:42 ############################################### [32mINFO [0m lib389.utils:ticket48194_test.py:337 ######################### Restarting the server ###################### [32mINFO [0m lib389.utils:ticket48194_test.py:86 Testing DES-CBC3-SHA -- expect to handshake failed [32mINFO [0m lib389.utils:ticket48194_test.py:92 Running cmdline: /usr/bin/openssl s_client -connect localhost:63601 -cipher DES-CBC3-SHA [32mINFO [0m lib389.utils:ticket48194_test.py:105 Found: b'New, (NONE), Cipher is (NONE)\n' [32mINFO [0m lib389.utils:ticket48194_test.py:86 Testing AES256-SHA256 -- expect to handshake failed [32mINFO [0m lib389.utils:ticket48194_test.py:92 Running cmdline: /usr/bin/openssl s_client -connect localhost:63601 -cipher AES256-SHA256 [32mINFO [0m lib389.utils:ticket48194_test.py:105 Found: b'New, (NONE), Cipher is (NONE)\n' | |||
Passed | tickets/ticket48212_test.py::test_ticket48212 | 15.53 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. ------------------------------Captured stderr call------------------------------ /bin/sh: /usr/sbin/dbverify: No such file or directory /bin/sh: /usr/sbin/dbverify: No such file or directory /bin/sh: /usr/sbin/dbverify: No such file or directory /bin/sh: /usr/sbin/dbverify: No such file or directory -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket48212_test.py:70 Bind as cn=Directory Manager [32mINFO [0m lib389:ticket48212_test.py:83 ######################### Import Test data (/var/lib/dirsrv/slapd-standalone1/ldif/example1k_posix.ldif) ###################### [32mINFO [0m lib389:tasks.py:498 Import task import_11092020_235133 for file /var/lib/dirsrv/slapd-standalone1/ldif/example1k_posix.ldif completed successfully [32mINFO [0m lib389:ticket48212_test.py:19 +++++ dbverify +++++ [32mINFO [0m lib389:ticket48212_test.py:23 Running /usr/sbin/dbverify -Z standalone1 -V [32mINFO [0m lib389:ticket48212_test.py:43 dbverify passed [32mINFO [0m lib389:ticket48212_test.py:92 ######################### Add index by uidnumber ###################### [32mINFO [0m lib389:ticket48212_test.py:101 ######################### reindexing... 
###################### [32mINFO [0m lib389:ticket48212_test.py:47 +++++ reindex uidnumber +++++ [32mINFO [0m lib389:tasks.py:798 Index task index_attrs_11092020_235136 completed successfully [32mINFO [0m lib389:ticket48212_test.py:19 +++++ dbverify +++++ [32mINFO [0m lib389:ticket48212_test.py:23 Running /usr/sbin/dbverify -Z standalone1 -V [32mINFO [0m lib389:ticket48212_test.py:43 dbverify passed [32mINFO [0m lib389:ticket48212_test.py:106 ######################### Add nsMatchingRule ###################### [32mINFO [0m lib389:ticket48212_test.py:112 ######################### reindexing... ###################### [32mINFO [0m lib389:ticket48212_test.py:47 +++++ reindex uidnumber +++++ [32mINFO [0m lib389:tasks.py:798 Index task index_attrs_11092020_235140 completed successfully [32mINFO [0m lib389:ticket48212_test.py:19 +++++ dbverify +++++ [32mINFO [0m lib389:ticket48212_test.py:23 Running /usr/sbin/dbverify -Z standalone1 -V [32mINFO [0m lib389:ticket48212_test.py:43 dbverify passed [32mINFO [0m lib389:ticket48212_test.py:117 ######################### Delete nsMatchingRule ###################### [32mINFO [0m lib389:ticket48212_test.py:47 +++++ reindex uidnumber +++++ [32mINFO [0m lib389:tasks.py:798 Index task index_attrs_11092020_235144 completed successfully [32mINFO [0m lib389:ticket48212_test.py:19 +++++ dbverify +++++ [32mINFO [0m lib389:ticket48212_test.py:23 Running /usr/sbin/dbverify -Z standalone1 -V [32mINFO [0m lib389:ticket48212_test.py:43 dbverify passed | |||
Passed | tickets/ticket48214_test.py::test_ticket48214_run | 0.16 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket48214_test.py:83 Bind as cn=Directory Manager [32mINFO [0m lib389:ticket48214_test.py:86 ######################### Out of Box ###################### [32mINFO [0m lib389:ticket48214_test.py:44 +++++ Check Max Ber Size +++++ [32mINFO [0m lib389:ticket48214_test.py:18 +++++ Get maxbersize from dse.ldif +++++ [32mINFO [0m lib389:ticket48214_test.py:21 Run CMD: egrep nsslapd-maxbersize /etc/dirsrv/slapd-standalone1/dse.ldif [32mINFO [0m lib389:ticket48214_test.py:28 Empty: [32mINFO [0m lib389:ticket48214_test.py:50 No nsslapd-maxbersize found in dse.ldif [32mINFO [0m lib389:ticket48214_test.py:63 ldapsearch returned nsslapd-maxbersize: b'2097152' [32mINFO [0m lib389:ticket48214_test.py:72 Checking 2097152 vs 2097152 [32mINFO [0m lib389:ticket48214_test.py:89 ######################### Add nsslapd-maxbersize: 0 ###################### [32mINFO [0m lib389:ticket48214_test.py:44 +++++ Check Max Ber Size +++++ [32mINFO [0m lib389:ticket48214_test.py:18 +++++ Get maxbersize from dse.ldif +++++ [32mINFO [0m lib389:ticket48214_test.py:21 Run CMD: egrep nsslapd-maxbersize /etc/dirsrv/slapd-standalone1/dse.ldif [32mINFO [0m lib389:ticket48214_test.py:35 Right format - nsslapd-maxbersize: 0 [32mINFO [0m lib389:ticket48214_test.py:52 nsslapd-maxbersize: 0 [32mINFO [0m lib389:ticket48214_test.py:63 ldapsearch returned nsslapd-maxbersize: b'2097152' [32mINFO [0m lib389:ticket48214_test.py:72 Checking 2097152 vs 2097152 [32mINFO [0m lib389:ticket48214_test.py:93 
######################### Add nsslapd-maxbersize: 10000 ###################### [32mINFO [0m lib389:ticket48214_test.py:44 +++++ Check Max Ber Size +++++ [32mINFO [0m lib389:ticket48214_test.py:18 +++++ Get maxbersize from dse.ldif +++++ [32mINFO [0m lib389:ticket48214_test.py:21 Run CMD: egrep nsslapd-maxbersize /etc/dirsrv/slapd-standalone1/dse.ldif [32mINFO [0m lib389:ticket48214_test.py:35 Right format - nsslapd-maxbersize: 10000 [32mINFO [0m lib389:ticket48214_test.py:55 nsslapd-maxbersize: 10000 [32mINFO [0m lib389:ticket48214_test.py:63 ldapsearch returned nsslapd-maxbersize: b'10000' [32mINFO [0m lib389:ticket48214_test.py:98 ticket48214 was successfully verified. | |||
Passed | tickets/ticket48233_test.py::test_ticket48233 | 6.09 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket48233_test:ticket48233_test.py:54 Test complete | |||
Passed | tickets/ticket48252_test.py::test_ticket48252_setup | 0.27 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | tickets/ticket48252_test.py::test_ticket48252_run_0 | 13.50 | |
------------------------------Captured stderr call------------------------------ [09/Nov/2020:23:53:17.445102197 -0500] - INFO - slapd_exemode_db2index - Backend Instance: userRoot [09/Nov/2020:23:53:17.461108379 -0500] - INFO - ldbm_instance_config_cachememsize_set - force a minimal value 512000 [09/Nov/2020:23:53:17.473504599 -0500] - INFO - bdb_instance_start - Import is running with nsslapd-db-private-import-mem on; No other process is allowed to access the database [09/Nov/2020:23:53:17.481082403 -0500] - INFO - check_and_set_import_cache - pagesize: 4096, available bytes 7474089984, process usage 23117824 [09/Nov/2020:23:53:17.485270024 -0500] - INFO - check_and_set_import_cache - Import allocates 2919566KB import cache. [09/Nov/2020:23:53:17.815934099 -0500] - INFO - bdb_db2index - userRoot: Indexing attribute: cn [09/Nov/2020:23:53:17.822358991 -0500] - ERR - libdb - BDB1566 txn_checkpoint interface requires an environment configured for the transaction subsystem [09/Nov/2020:23:53:17.838104051 -0500] - ERR - bdb_force_checkpoint - Checkpoint FAILED, error Invalid argument (22) [09/Nov/2020:23:53:17.851552036 -0500] - INFO - bdb_db2index - userRoot: Finished indexing. [09/Nov/2020:23:53:17.931052293 -0500] - INFO - bdb_pre_close - All database threads now stopped -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:__init__.py:3014 Running script: ['/usr/bin/dbscan', '-f', '/var/lib/dirsrv/slapd-standalone1/db/userRoot/cn.db'] [32mINFO [0m lib389:ticket48252_test.py:63 Did not found key test_user0 in dbscan output [32mINFO [0m lib389:__init__.py:3014 Running script: ['/usr/bin/dbscan', '-f', '/var/lib/dirsrv/slapd-standalone1/db/userRoot/cn.db'] [32mINFO [0m lib389:ticket48252_test.py:63 Did not found key test_user0 in dbscan output | |||
Passed | tickets/ticket48252_test.py::test_ticket48252_run_1 | 5.02 | |
------------------------------Captured stderr call------------------------------ [09/Nov/2020:23:53:26.836110422 -0500] - INFO - slapd_exemode_db2index - Backend Instance: userRoot [09/Nov/2020:23:53:26.845844643 -0500] - INFO - ldbm_instance_config_cachememsize_set - force a minimal value 512000 [09/Nov/2020:23:53:26.858243463 -0500] - INFO - bdb_instance_start - Import is running with nsslapd-db-private-import-mem on; No other process is allowed to access the database [09/Nov/2020:23:53:26.868227014 -0500] - INFO - check_and_set_import_cache - pagesize: 4096, available bytes 7474335744, process usage 22892544 [09/Nov/2020:23:53:26.873419464 -0500] - INFO - check_and_set_import_cache - Import allocates 2919662KB import cache. [09/Nov/2020:23:53:27.133892130 -0500] - INFO - bdb_db2index - userRoot: Indexing attribute: objectclass [09/Nov/2020:23:53:27.138951574 -0500] - ERR - libdb - BDB1566 txn_checkpoint interface requires an environment configured for the transaction subsystem [09/Nov/2020:23:53:27.142428717 -0500] - ERR - bdb_force_checkpoint - Checkpoint FAILED, error Invalid argument (22) [09/Nov/2020:23:53:27.155461687 -0500] - INFO - bdb_db2index - userRoot: Finished indexing. [09/Nov/2020:23:53:27.194039691 -0500] - INFO - bdb_pre_close - All database threads now stopped | |||
Passed | tickets/ticket48265_test.py::test_ticket48265_test | 0.90 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket48265_test:ticket48265_test.py:34 Adding 20 test entries... [32mINFO [0m tests.tickets.ticket48265_test:ticket48265_test.py:51 Search with Ticket 47521 type complex filter [32mINFO [0m tests.tickets.ticket48265_test:ticket48265_test.py:60 Search with Ticket 48265 type complex filter [32mINFO [0m tests.tickets.ticket48265_test:ticket48265_test.py:69 Test 48265 complete | |||
Passed | tickets/ticket48266_test.py::test_ticket48266_fractional | 20.98 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 5c644bd3-9f3b-47de-aed5-ddc22a6e9a8a / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 
Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect d2a4f63a-0a0f-4910-9091-1f093b20bf25 / got description=5c644bd3-9f3b-47de-aed5-ddc22a6e9a8a) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect e6bd85ac-29b1-477d-ae7a-3e0adf9910a1 / got description=d2a4f63a-0a0f-4910-9091-1f093b20bf25) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect e6bd85ac-29b1-477d-ae7a-3e0adf9910a1 / got description=d2a4f63a-0a0f-4910-9091-1f093b20bf25) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect e6bd85ac-29b1-477d-ae7a-3e0adf9910a1 / got description=d2a4f63a-0a0f-4910-9091-1f093b20bf25) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect e6bd85ac-29b1-477d-ae7a-3e0adf9910a1 / got description=d2a4f63a-0a0f-4910-9091-1f093b20bf25) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working | |||
Passed | tickets/ticket48266_test.py::test_ticket48266_check_repl_desc | 1.14 | |
No log output captured. | |||
Passed | tickets/ticket48270_test.py::test_ticket48270_init | 0.23 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket48270_test:ticket48270_test.py:26 Initialization: add dummy entries for the tests | |||
Passed | tickets/ticket48270_test.py::test_ticket48270_homeDirectory_indexed_cis | 2.04 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket48270_test:ticket48270_test.py:39 index homeDirectory in caseIgnoreIA5Match and caseExactIA5Match [32mINFO [0m tests.tickets.ticket48270_test:ticket48270_test.py:57 successfully checked that filter with exact mr , a filter with lowercase eq is failing [32mINFO [0m lib389:tasks.py:798 Index task index_attrs_11092020_235502 completed successfully [32mINFO [0m tests.tickets.ticket48270_test:ticket48270_test.py:63 Check indexing succeeded with a specified matching rule | |||
Passed | tickets/ticket48270_test.py::test_ticket48270_homeDirectory_mixed_value | 0.03 | |
No log output captured. | |||
Passed | tickets/ticket48270_test.py::test_ticket48270_extensible_search | 0.01 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket48270_test:ticket48270_test.py:91 Default: can retrieve an entry filter syntax with exact stored value [32mINFO [0m tests.tickets.ticket48270_test:ticket48270_test.py:93 Default: can retrieve an entry filter caseExactIA5Match with exact stored value [32mINFO [0m tests.tickets.ticket48270_test:ticket48270_test.py:97 Default: can not retrieve an entry filter syntax match with lowered stored value [32mINFO [0m tests.tickets.ticket48270_test:ticket48270_test.py:103 Default: can not retrieve an entry filter caseExactIA5Match with lowered stored value [32mINFO [0m tests.tickets.ticket48270_test:ticket48270_test.py:110 Default: can retrieve an entry filter caseIgnoreIA5Match with lowered stored value | |||
Passed | tickets/ticket48272_test.py::test_ticket48272 | 10.55 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket48272_test:ticket48272_test.py:129 Test PASSED | |||
Passed | tickets/ticket48294_test.py::test_48294_init | 0.10 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket48294_test.py:31 ############################################### [32mINFO [0m lib389:ticket48294_test.py:32 ####### Testing Ticket 48294 - Linked Attributes plug-in - won't update links after MODRDN operation [32mINFO [0m lib389:ticket48294_test.py:33 ############################################### | |||
Passed | tickets/ticket48294_test.py::test_48294_run_0 | 0.02 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket48294_test.py:31 ############################################### [32mINFO [0m lib389:ticket48294_test.py:32 ####### Case 0 - Rename employee1 and adjust the link type value by replace [32mINFO [0m lib389:ticket48294_test.py:33 ############################################### [32mINFO [0m lib389:ticket48294_test.py:59 ######################### MODRDN uid=employee2 ###################### | |||
Passed | tickets/ticket48294_test.py::test_48294_run_1 | 0.03 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket48294_test.py:31 ############################################### [32mINFO [0m lib389:ticket48294_test.py:32 ####### Case 1 - Rename employee2 and adjust the link type value by delete and add [32mINFO [0m lib389:ticket48294_test.py:33 ############################################### [32mINFO [0m lib389:ticket48294_test.py:59 ######################### MODRDN uid=employee3 ###################### | |||
Passed | tickets/ticket48294_test.py::test_48294_run_2 | 0.03 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket48294_test.py:31 ############################################### [32mINFO [0m lib389:ticket48294_test.py:32 ####### Case 2 - Rename manager1 to manager2 and make sure the managed attribute value is updated [32mINFO [0m lib389:ticket48294_test.py:33 ############################################### [32mINFO [0m lib389:ticket48294_test.py:59 ######################### MODRDN uid=manager2 ###################### | |||
Passed | tickets/ticket48295_test.py::test_48295_init | 0.30 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket48295_test.py:30 ############################################### [32mINFO [0m lib389:ticket48295_test.py:31 ####### Testing Ticket 48295 - Entry cache is not rolled back -- Linked Attributes plug-in - wrong behaviour when adding valid and broken links [32mINFO [0m lib389:ticket48295_test.py:32 ############################################### | |||
Passed | tickets/ticket48295_test.py::test_48295_run | 0.26 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket48295_test.py:30 ############################################### [32mINFO [0m lib389:ticket48295_test.py:31 ####### Add 2 linktypes to manager1 - one exists, another does not to make sure the managed entry does not have managed type. [32mINFO [0m lib389:ticket48295_test.py:32 ############################################### | |||
Passed | tickets/ticket48312_test.py::test_ticket48312 | 0.18 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket48312_test:ticket48312_test.py:117 Test complete | |||
Passed | tickets/ticket48354_test.py::test_ticket48354 | 0.01 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket48354_test:ticket48354_test.py:50 Test PASSED | |||
Passed | tickets/ticket48362_test.py::test_ticket48362 | 96.37 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect b7ecaf8e-e8cf-4c9b-9de6-b7a128c93a11 / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 
Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 4b17dfb7-c29a-4f5e-9198-87c5b40605c4 / got description=b7ecaf8e-e8cf-4c9b-9de6-b7a128c93a11) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket48362_test:ticket48362_test.py:28 Add dna plugin config entry...ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 [32mINFO [0m tests.tickets.ticket48362_test:ticket48362_test.py:48 Enable the DNA plugin... [32mINFO [0m tests.tickets.ticket48362_test:ticket48362_test.py:55 Restarting the server... [32mINFO [0m tests.tickets.ticket48362_test:ticket48362_test.py:28 Add dna plugin config entry...ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m tests.tickets.ticket48362_test:ticket48362_test.py:48 Enable the DNA plugin... 
[32mINFO [0m tests.tickets.ticket48362_test:ticket48362_test.py:55 Restarting the server... [32mINFO [0m tests.tickets.ticket48362_test:ticket48362_test.py:83 ======================== Update dnaPortNum=39001 ============================ [32mINFO [0m tests.tickets.ticket48362_test:ticket48362_test.py:90 ======================== Update done [32mINFO [0m tests.tickets.ticket48362_test:ticket48362_test.py:83 ======================== Update dnaPortNum=39002 ============================ [32mINFO [0m tests.tickets.ticket48362_test:ticket48362_test.py:90 ======================== Update done [32mINFO [0m tests.tickets.ticket48362_test:ticket48362_test.py:132 ======================== BEFORE RESTART ============================ [32mINFO [0m tests.tickets.ticket48362_test:ticket48362_test.py:135 ======================== BEFORE RESTART ============================ [32mINFO [0m tests.tickets.ticket48362_test:ticket48362_test.py:141 ======================== BEFORE RESTART ============================ [32mINFO [0m tests.tickets.ticket48362_test:ticket48362_test.py:150 =================== AFTER RESTART ================================= [32mINFO [0m tests.tickets.ticket48362_test:ticket48362_test.py:153 =================== AFTER RESTART ================================= [32mINFO [0m tests.tickets.ticket48362_test:ticket48362_test.py:159 =================== AFTER RESTART ================================= [32mINFO [0m tests.tickets.ticket48362_test:ticket48362_test.py:162 Test complete | |||
Passed | tickets/ticket48366_test.py::test_ticket48366_init | 0.49 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket48366_test.py:44 Add subtree: ou=green,dc=example,dc=com [32mINFO [0m lib389:ticket48366_test.py:48 Add subtree: ou=red,dc=example,dc=com [32mINFO [0m lib389:ticket48366_test.py:54 Add cn=test,ou=people,dc=example,dc=com [32mINFO [0m lib389:ticket48366_test.py:60 Add cn=proxy,ou=people,dc=example,dc=com [32mINFO [0m lib389.utils:ticket48366_test.py:90 Adding %d test entries... | |||
Passed | tickets/ticket48366_test.py::test_ticket48366_search_user | 0.10 | |
No log output captured. | |||
Passed | tickets/ticket48366_test.py::test_ticket48366_search_dm | 0.06 | |
No log output captured. | |||
Passed | tickets/ticket48370_test.py::test_ticket48370 | 0.43 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket48370_test:ticket48370_test.py:187 Test PASSED | |||
Passed | tickets/ticket48383_test.py::test_ticket48383 | 68.05 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. ------------------------------Captured stderr call------------------------------ ldiffile: /var/lib/dirsrv/slapd-standalone1/ldif/standalone1.ldif -------------------------------Captured log call-------------------------------- [31mCRITICAL[0m tests.tickets.ticket48383_test:ticket48383_test.py:62 Failed to change nsslapd-cachememsize No such object [32mINFO [0m tests.tickets.ticket48383_test:ticket48383_test.py:88 Test complete | |||
Passed | tickets/ticket48497_test.py::test_ticket48497_init | 0.89 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket48497_test:ticket48497_test.py:26 Initialization: add dummy entries for the tests | |||
Passed | tickets/ticket48497_test.py::test_ticket48497_homeDirectory_mixed_value | 0.01 | |
No log output captured. | |||
Passed | tickets/ticket48497_test.py::test_ticket48497_extensible_search | 0.26 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket48497_test:ticket48497_test.py:49 Default: can retrieve an entry filter syntax with exact stored value [32mINFO [0m tests.tickets.ticket48497_test:ticket48497_test.py:51 Default: can retrieve an entry filter caseExactIA5Match with exact stored value [32mINFO [0m tests.tickets.ticket48497_test:ticket48497_test.py:55 Default: can not retrieve an entry filter syntax match with lowered stored value [32mINFO [0m tests.tickets.ticket48497_test:ticket48497_test.py:61 Default: can not retrieve an entry filter caseExactIA5Match with lowered stored value [32mINFO [0m tests.tickets.ticket48497_test:ticket48497_test.py:68 Default: can retrieve an entry filter caseIgnoreIA5Match with lowered stored value | |||
Passed | tickets/ticket48497_test.py::test_ticket48497_homeDirectory_index_cfg | 0.04 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket48497_test:ticket48497_test.py:73 index homeDirectory in caseIgnoreIA5Match and caseExactIA5Match | |||
Passed | tickets/ticket48497_test.py::test_ticket48497_homeDirectory_index_run | 2.03 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:tasks.py:798 Index task index_attrs_11102020_000427 completed successfully [32mINFO [0m tests.tickets.ticket48497_test:ticket48497_test.py:93 Check indexing succeeded with a specified matching rule | |||
Passed | tickets/ticket48665_test.py::test_ticket48665 | 0.08 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket48665_test:ticket48665_test.py:40 8 entries are returned from the server. [31mCRITICAL[0m tests.tickets.ticket48665_test:ticket48665_test.py:47 Failed to change nsslapd-cachememsize No such object [32mINFO [0m tests.tickets.ticket48665_test:ticket48665_test.py:52 8 entries are returned from the server. [32mINFO [0m tests.tickets.ticket48665_test:ticket48665_test.py:63 8 entries are returned from the server. [32mINFO [0m tests.tickets.ticket48665_test:ticket48665_test.py:65 Test complete | |||
Passed | tickets/ticket48745_test.py::test_ticket48745_init | 0.21 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket48745_test:ticket48745_test.py:26 Initialization: add dummy entries for the tests | |||
Passed | tickets/ticket48745_test.py::test_ticket48745_homeDirectory_indexed_cis | 2.04 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket48745_test:ticket48745_test.py:39 index homeDirectory in caseIgnoreIA5Match and caseExactIA5Match [32mINFO [0m tests.tickets.ticket48745_test:ticket48745_test.py:57 successfully checked that filter with exact mr , a filter with lowercase eq is failing [32mINFO [0m lib389:tasks.py:798 Index task index_attrs_11102020_000516 completed successfully [32mINFO [0m tests.tickets.ticket48745_test:ticket48745_test.py:63 Check indexing succeeded with a specified matching rule | |||
Passed | tickets/ticket48745_test.py::test_ticket48745_homeDirectory_mixed_value | 0.03 | |
No log output captured. | |||
Passed | tickets/ticket48745_test.py::test_ticket48745_extensible_search_after_index | 0.01 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket48745_test:ticket48745_test.py:91 Default: can retrieve an entry filter syntax with exact stored value [32mINFO [0m tests.tickets.ticket48745_test:ticket48745_test.py:99 Default: can retrieve an entry filter caseExactIA5Match with exact stored value [32mINFO [0m tests.tickets.ticket48745_test:ticket48745_test.py:106 Default: can not retrieve an entry filter syntax match with lowered stored value [32mINFO [0m tests.tickets.ticket48745_test:ticket48745_test.py:112 Default: can not retrieve an entry filter caseExactIA5Match with lowered stored value [32mINFO [0m tests.tickets.ticket48745_test:ticket48745_test.py:119 Default: can retrieve an entry filter caseIgnoreIA5Match with lowered stored value | |||
Passed | tickets/ticket48746_test.py::test_ticket48746_init | 0.42 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket48746_test:ticket48746_test.py:26 Initialization: add dummy entries for the tests | |||
Passed | tickets/ticket48746_test.py::test_ticket48746_homeDirectory_indexed_cis | 2.06 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket48746_test:ticket48746_test.py:39 index homeDirectory in caseIgnoreIA5Match and caseExactIA5Match [32mINFO [0m tests.tickets.ticket48746_test:ticket48746_test.py:57 successfully checked that filter with exact mr , a filter with lowercase eq is failing [32mINFO [0m lib389:tasks.py:798 Index task index_attrs_11102020_000533 completed successfully [32mINFO [0m tests.tickets.ticket48746_test:ticket48746_test.py:63 Check indexing succeeded with a specified matching rule | |||
Passed | tickets/ticket48746_test.py::test_ticket48746_homeDirectory_mixed_value | 0.01 | |
No log output captured. | |||
Passed | tickets/ticket48746_test.py::test_ticket48746_extensible_search_after_index | 0.00 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket48746_test:ticket48746_test.py:99 Default: can retrieve an entry filter caseExactIA5Match with exact stored value | |||
Passed | tickets/ticket48746_test.py::test_ticket48746_homeDirectory_indexed_ces | 2.03 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket48746_test:ticket48746_test.py:104 index homeDirectory in caseExactIA5Match, this would trigger the crash [32mINFO [0m tests.tickets.ticket48746_test:ticket48746_test.py:121 successfully checked that filter with exact mr , a filter with lowercase eq is failing [32mINFO [0m lib389:tasks.py:798 Index task index_attrs_11102020_000536 completed successfully [32mINFO [0m tests.tickets.ticket48746_test:ticket48746_test.py:127 Check indexing succeeded with a specified matching rule | |||
Passed | tickets/ticket48759_test.py::test_ticket48759 | 21.02 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket48759_test.py:66 !!!!!!! uid=member2,dc=example,dc=com: memberof->b'cn=group,dc=example,dc=com' [32mINFO [0m lib389:ticket48759_test.py:66 !!!!!!! uid=member2,dc=example,dc=com: memberof->b'cn=group,dc=example,dc=com' [32mINFO [0m lib389:ticket48759_test.py:66 !!!!!!! uid=member2,dc=example,dc=com: memberof->b'cn=group,dc=example,dc=com' | |||
Passed | tickets/ticket48799_test.py::test_ticket48799 | 15.45 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for consumer1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39201, 'ldap-secureport': 63901, 'server-id': 'consumer1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:169 Joining consumer consumer1 from master1 ... [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 665d7110-16d0-4cc8-8764-fe9c12b2c042 / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is working [32mINFO [0m lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 [32mINFO [0m 
lib389.topologies:topologies.py:174 Ensuring consumer consumer1 from master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 already exists -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket48799_test:ticket48799_test.py:80 Test complete | |||
Passed | tickets/ticket48844_test.py::test_ticket48844_init | 1.04 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:backend.py:80 List backend with suffix=dc=bitwise,dc=com [32mINFO [0m lib389:backend.py:290 Creating a local backend [32mINFO [0m lib389:backend.py:76 List backend cn=TestBitw,cn=ldbm database,cn=plugins,cn=config [32mINFO [0m lib389:__init__.py:1713 Found entry dn: cn=TestBitw,cn=ldbm database,cn=plugins,cn=config cn: TestBitw nsslapd-cachememsize: 512000 nsslapd-cachesize: -1 nsslapd-directory: /var/lib/dirsrv/slapd-standalone1/db/TestBitw nsslapd-dncachememsize: 16777216 nsslapd-readonly: off nsslapd-require-index: off nsslapd-require-internalop-index: off nsslapd-suffix: dc=bitwise,dc=com objectClass: top objectClass: extensibleObject objectClass: nsBackendInstance [32mINFO [0m lib389:mappingTree.py:154 Entry dn: cn="dc=bitwise,dc=com",cn=mapping tree,cn=config cn: dc=bitwise,dc=com nsslapd-backend: TestBitw nsslapd-state: backend objectclass: top objectclass: extensibleObject objectclass: nsMappingTree [32mINFO [0m lib389:__init__.py:1713 Found entry dn: cn=dc\3Dbitwise\2Cdc\3Dcom,cn=mapping tree,cn=config cn: dc=bitwise,dc=com nsslapd-backend: TestBitw nsslapd-state: backend objectClass: top objectClass: extensibleObject objectClass: nsMappingTree | |||
Passed | tickets/ticket48844_test.py::test_ticket48844_bitwise_on | 4.15 | |
No log output captured. | |||
Passed | tickets/ticket48844_test.py::test_ticket48844_bitwise_off | 4.87 | |
No log output captured. | |||
Passed | tickets/ticket48891_test.py::test_ticket48891_setup | 2.39 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket48891_test.py:43 Bind as cn=Directory Manager [32mINFO [0m lib389:ticket48891_test.py:52 ######################### SETUP SUFFIX o=ticket48891.org ###################### [32mINFO [0m lib389:backend.py:80 List backend with suffix=dc=ticket48891.org [32mINFO [0m lib389:backend.py:290 Creating a local backend [32mINFO [0m lib389:backend.py:76 List backend cn=ticket48891,cn=ldbm database,cn=plugins,cn=config [32mINFO [0m lib389:__init__.py:1713 Found entry dn: cn=ticket48891,cn=ldbm database,cn=plugins,cn=config cn: ticket48891 nsslapd-cachememsize: 512000 nsslapd-cachesize: -1 nsslapd-directory: /var/lib/dirsrv/slapd-standalone1/db/ticket48891 nsslapd-dncachememsize: 16777216 nsslapd-readonly: off nsslapd-require-index: off nsslapd-require-internalop-index: off nsslapd-suffix: dc=ticket48891.org objectClass: top objectClass: extensibleObject objectClass: nsBackendInstance [32mINFO [0m lib389:mappingTree.py:154 Entry dn: cn="dc=ticket48891.org",cn=mapping tree,cn=config cn: dc=ticket48891.org nsslapd-backend: ticket48891 nsslapd-state: backend objectclass: top objectclass: extensibleObject objectclass: nsMappingTree [32mINFO [0m lib389:__init__.py:1713 Found entry dn: cn=dc\3Dticket48891.org,cn=mapping tree,cn=config cn: dc=ticket48891.org nsslapd-backend: ticket48891 nsslapd-state: backend objectClass: top objectClass: extensibleObject objectClass: nsMappingTree [32mINFO [0m lib389:ticket48891_test.py:61 ######################### Generate Test data 
###################### [32mINFO [0m lib389:ticket48891_test.py:77 ######################### SEARCH ALL ###################### [32mINFO [0m lib389:ticket48891_test.py:78 Bind as cn=Directory Manager and add the READ/SEARCH SELFDN aci [32mINFO [0m lib389:ticket48891_test.py:82 Returned 10 entries. [32mINFO [0m lib389:ticket48891_test.py:86 10 person entries are successfully created under dc=ticket48891.org. | |||
Passed | tickets/ticket48893_test.py::test_ticket48893 | 0.01 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket48893_test:ticket48893_test.py:46 Test PASSED | |||
Passed | tickets/ticket48906_test.py::test_ticket48906_setup | 0.14 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket48906_test.py:63 Bind as cn=Directory Manager [32mINFO [0m lib389:ticket48906_test.py:83 ######################### SEARCH ALL ###################### [32mINFO [0m lib389:ticket48906_test.py:84 Bind as cn=Directory Manager and add the READ/SEARCH SELFDN aci [32mINFO [0m lib389:ticket48906_test.py:88 Returned 10 entries. [32mINFO [0m lib389:ticket48906_test.py:92 10 person entries are successfully created under dc=example,dc=com. | |||
Passed | tickets/ticket48906_test.py::test_ticket48906_dblock_default | 0.02 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket48906_test.py:149 ################################### [32mINFO [0m lib389:ticket48906_test.py:150 ### [32mINFO [0m lib389:ticket48906_test.py:151 ### Check that before any change config/monitor [32mINFO [0m lib389:ticket48906_test.py:152 ### contains the default value [32mINFO [0m lib389:ticket48906_test.py:153 ### [32mINFO [0m lib389:ticket48906_test.py:154 ################################### | |||
Passed | tickets/ticket48906_test.py::test_ticket48906_dblock_ldap_update | 3.65 | |
------------------------------Captured stdout call------------------------------ line locks:10000 expected_value 10000 value 10000 -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket48906_test.py:160 ################################### [32mINFO [0m lib389:ticket48906_test.py:161 ### [32mINFO [0m lib389:ticket48906_test.py:162 ### Check that after ldap update [32mINFO [0m lib389:ticket48906_test.py:163 ### - monitor contains DEFAULT [32mINFO [0m lib389:ticket48906_test.py:164 ### - configured contains DBLOCK_LDAP_UPDATE [32mINFO [0m lib389:ticket48906_test.py:165 ### - After stop dse.ldif contains DBLOCK_LDAP_UPDATE [32mINFO [0m lib389:ticket48906_test.py:166 ### - After stop guardian contains DEFAULT [32mINFO [0m lib389:ticket48906_test.py:167 ### In fact guardian should differ from config to recreate the env [32mINFO [0m lib389:ticket48906_test.py:168 ### Check that after restart (DBenv recreated) [32mINFO [0m lib389:ticket48906_test.py:169 ### - monitor contains DBLOCK_LDAP_UPDATE [32mINFO [0m lib389:ticket48906_test.py:170 ### - configured contains DBLOCK_LDAP_UPDATE [32mINFO [0m lib389:ticket48906_test.py:171 ### - dse.ldif contains DBLOCK_LDAP_UPDATE [32mINFO [0m lib389:ticket48906_test.py:172 ### [32mINFO [0m lib389:ticket48906_test.py:173 ################################### | |||
Passed | tickets/ticket48906_test.py::test_ticket48906_dblock_edit_update | 7.03 | |
------------------------------Captured stdout call------------------------------ line locks:20000 expected_value 20000 value 20000 line locks:40000 expected_value 40000 value 40000 -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket48906_test.py:191 ################################### [32mINFO [0m lib389:ticket48906_test.py:192 ### [32mINFO [0m lib389:ticket48906_test.py:193 ### Check that after stop [32mINFO [0m lib389:ticket48906_test.py:194 ### - dse.ldif contains DBLOCK_LDAP_UPDATE [32mINFO [0m lib389:ticket48906_test.py:195 ### - guardian contains DBLOCK_LDAP_UPDATE [32mINFO [0m lib389:ticket48906_test.py:196 ### Check that edit dse+restart [32mINFO [0m lib389:ticket48906_test.py:197 ### - monitor contains DBLOCK_EDIT_UPDATE [32mINFO [0m lib389:ticket48906_test.py:198 ### - configured contains DBLOCK_EDIT_UPDATE [32mINFO [0m lib389:ticket48906_test.py:199 ### Check that after stop [32mINFO [0m lib389:ticket48906_test.py:200 ### - dse.ldif contains DBLOCK_EDIT_UPDATE [32mINFO [0m lib389:ticket48906_test.py:201 ### - guardian contains DBLOCK_EDIT_UPDATE [32mINFO [0m lib389:ticket48906_test.py:202 ### [32mINFO [0m lib389:ticket48906_test.py:203 ################################### | |||
Passed | tickets/ticket48906_test.py::test_ticket48906_dblock_robust | 6.03 | |
------------------------------Captured stdout call------------------------------ line locks:40000 expected_value 40000 value 40000 -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:ticket48906_test.py:245 ################################### [32mINFO [0m lib389:ticket48906_test.py:246 ### [32mINFO [0m lib389:ticket48906_test.py:247 ### Check that the following values are rejected [32mINFO [0m lib389:ticket48906_test.py:248 ### - negative value [32mINFO [0m lib389:ticket48906_test.py:249 ### - insuffisant value [32mINFO [0m lib389:ticket48906_test.py:250 ### - invalid value [32mINFO [0m lib389:ticket48906_test.py:251 ### Check that minimum value is accepted [32mINFO [0m lib389:ticket48906_test.py:252 ### [32mINFO [0m lib389:ticket48906_test.py:253 ################################### | |||
Passed | tickets/ticket48944_test.py::test_ticket48944 | 111.87 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for consumer1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39201, 'ldap-secureport': 63901, 'server-id': 'consumer1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for consumer2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39202, 'ldap-secureport': 63902, 'server-id': 'consumer2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 6028636c-0e2f-40d9-9962-603f7a98b54c / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 586dfeb1-c9f4-48e8-9a8c-7d6e39dbf206 / got description=6028636c-0e2f-40d9-9962-603f7a98b54c) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... 
[32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists [32mINFO [0m lib389.topologies:topologies.py:169 Joining consumer consumer1 from master1 ... [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 26774b40-a422-4847-83a6-82ffd536f63e / got description=586dfeb1-c9f4-48e8-9a8c-7d6e39dbf206) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is working [32mINFO [0m lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 [32mINFO [0m lib389.topologies:topologies.py:169 Joining consumer consumer2 from master1 ... 
[32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39202 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39202 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39202 is NOT working (expect 663bcdc7-9053-49d5-a5c0-b23411adbe46 / got description=26774b40-a422-4847-83a6-82ffd536f63e) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39202 is working [32mINFO [0m lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39202 [32mINFO [0m lib389.topologies:topologies.py:174 Ensuring consumer consumer1 from master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 already exists [32mINFO [0m lib389.topologies:topologies.py:174 Ensuring consumer consumer2 from master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39202 already exists [32mINFO [0m lib389.topologies:topologies.py:174 Ensuring consumer consumer1 from master2 ... 
[32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is was created [32mINFO [0m lib389.topologies:topologies.py:174 Ensuring consumer consumer2 from master2 ... [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39202 is was created -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket48944_test:ticket48944_test.py:108 Ticket 48944 - On a read only replica invalid state info can accumulate [32mINFO [0m tests.tickets.ticket48944_test:ticket48944_test.py:52 Enable account policy plugin and configure required attributes [32mINFO [0m tests.tickets.ticket48944_test:ticket48944_test.py:54 Configure Account policy plugin on master1 [32mINFO [0m tests.tickets.ticket48944_test:ticket48944_test.py:52 Enable account policy plugin and configure required attributes [32mINFO [0m tests.tickets.ticket48944_test:ticket48944_test.py:54 Configure Account policy plugin on master2 [32mINFO [0m tests.tickets.ticket48944_test:ticket48944_test.py:52 Enable account policy plugin and configure required attributes [32mINFO [0m tests.tickets.ticket48944_test:ticket48944_test.py:69 Configure Account policy plugin on consumer1 [32mINFO [0m tests.tickets.ticket48944_test:ticket48944_test.py:52 Enable account policy plugin and configure required attributes [32mINFO [0m tests.tickets.ticket48944_test:ticket48944_test.py:69 Configure Account policy plugin on consumer2 [32mINFO [0m tests.tickets.ticket48944_test:ticket48944_test.py:115 Sleep for 10secs for the server to come up [32mINFO [0m tests.tickets.ticket48944_test:ticket48944_test.py:117 Add few entries to server and check if entries are replicated [32mINFO [0m 
tests.tickets.ticket48944_test:ticket48944_test.py:132 Checking if entries are synced across masters and consumers [32mINFO [0m tests.tickets.ticket48944_test:ticket48944_test.py:152 Start master2 to sync lastLoginTime attribute from master1 [32mINFO [0m tests.tickets.ticket48944_test:ticket48944_test.py:155 Stop master1 [32mINFO [0m tests.tickets.ticket48944_test:ticket48944_test.py:157 Bind as user1 to master2 and check if lastLoginTime attribute is greater than master1 [32mINFO [0m tests.tickets.ticket48944_test:ticket48944_test.py:161 Start all servers except master1 [32mINFO [0m tests.tickets.ticket48944_test:ticket48944_test.py:167 Check if consumers are updated with lastLoginTime attribute value from master2 [32mINFO [0m tests.tickets.ticket48944_test:ticket48944_test.py:174 Check if lastLoginTime update in consumers not synced to master2 [32mINFO [0m tests.tickets.ticket48944_test:ticket48944_test.py:185 Start master1 and check if its updating its older lastLoginTime attribute to consumers [32mINFO [0m tests.tickets.ticket48944_test:ticket48944_test.py:194 Check if lastLoginTime update from master2 is synced to all masters and consumers [32mINFO [0m tests.tickets.ticket48944_test:ticket48944_test.py:202 Checking consumer error logs for replica invalid state info | |||
Passed | tickets/ticket49008_test.py::test_ticket49008 | 52.31 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master3 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39003, 'ldap-secureport': 63703, 'server-id': 'master3', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect a37117b7-c8e8-4580-afbb-72cc65714d5f / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 28ca04bd-c3c2-40c1-8599-aceb5a77346a / got description=a37117b7-c8e8-4580-afbb-72cc65714d5f) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master3 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 55233153-f749-40ec-90dd-83644c3084ea / got description=28ca04bd-c3c2-40c1-8599-aceb5a77346a) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect d06f701e-25d6-4eae-98ec-cb73a81350c9 / got description=55233153-f749-40ec-90dd-83644c3084ea) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... 
[32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master3 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master3 ... [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is was created [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master3 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master3 to master2 ... 
[32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m tests.tickets.ticket49008_test:ticket49008_test.py:115 ruv before fail: b'{replica 2 ldap://localhost.localdomain:39002} 5faa224a000100020000 5faa2279000300020000' [32mINFO [0m tests.tickets.ticket49008_test:ticket49008_test.py:116 ruv after fail: b'{replica 2 ldap://localhost.localdomain:39002} 5faa224a000100020000 5faa2279000300020000' | |||
Passed | tickets/ticket49020_test.py::test_ticket49020 | 34.53 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master3 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39003, 'ldap-secureport': 63703, 'server-id': 'master3', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect e8dd6799-843c-4637-8025-e5efa6a1b868 / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 370ad2c2-cd29-4bce-814a-1cd186c1fe28 / got description=e8dd6799-843c-4637-8025-e5efa6a1b868) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master3 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect c85442f9-e349-4260-b5a2-1a75b92cbe1c / got description=370ad2c2-cd29-4bce-814a-1cd186c1fe28) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 0cba26b4-3d4d-446e-a523-bb9ecf28f60d / got description=c85442f9-e349-4260-b5a2-1a75b92cbe1c) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... 
[32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master3 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master3 ... [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is was created [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master3 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master3 to master2 ... 
[32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created -------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1169 Starting total init cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config | |||
Passed | tickets/ticket49076_test.py::test_ticket49076 | 10.02 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | tickets/ticket49095_test.py::test_ticket49095 | 0.35 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket49095_test:ticket49095_test.py:79 Test Passed | |||
Passed | tickets/ticket49104_test.py::test_ticket49104 | 0.40 | |
------------------------------Captured stderr call------------------------------ valgrind: /usr/bin/dbscan-bin: No such file or directory grep: /tmp/val49104.out: No such file or directory valgrind: /usr/bin/dbscan-bin: No such file or directory grep: /tmp/val49104.out: No such file or directory valgrind: /usr/bin/dbscan-bin: No such file or directory grep: /tmp/val49104.out: No such file or directory valgrind: /usr/bin/dbscan-bin: No such file or directory grep: /tmp/val49104.out: No such file or directory valgrind: /usr/bin/dbscan-bin: No such file or directory grep: /tmp/val49104.out: No such file or directory valgrind: /usr/bin/dbscan-bin: No such file or directory grep: /tmp/val49104.out: No such file or directory valgrind: /usr/bin/dbscan-bin: No such file or directory grep: /tmp/val49104.out: No such file or directory valgrind: /usr/bin/dbscan-bin: No such file or directory grep: /tmp/val49104.out: No such file or directory valgrind: /usr/bin/dbscan-bin: No such file or directory grep: /tmp/val49104.out: No such file or directory valgrind: /usr/bin/dbscan-bin: No such file or directory grep: /tmp/val49104.out: No such file or directory | |||
Passed | tickets/ticket49122_test.py::test_ticket49122 | 14.73 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket49122_test:ticket49122_test.py:57 Testing filter: nsrole=cn=empty,dc=example,dc=com [32mINFO [0m tests.tickets.ticket49122_test:ticket49122_test.py:57 Testing filter: (nsrole=cn=empty,dc=example,dc=com) [32mINFO [0m tests.tickets.ticket49122_test:ticket49122_test.py:57 Testing filter: (&(nsrole=cn=empty,dc=example,dc=com)) [32mINFO [0m tests.tickets.ticket49122_test:ticket49122_test.py:57 Testing filter: (!(nsrole=cn=empty,dc=example,dc=com)) [32mINFO [0m tests.tickets.ticket49122_test:ticket49122_test.py:57 Testing filter: (&(|(objectclass=person)(sn=app*))(userpassword=*)) [32mINFO [0m tests.tickets.ticket49122_test:ticket49122_test.py:57 Testing filter: (&(|(objectclass=person)(nsrole=cn=empty,dc=example,dc=com))(userpassword=*)) [32mINFO [0m tests.tickets.ticket49122_test:ticket49122_test.py:57 Testing filter: (&(|(nsrole=cn=empty,dc=example,dc=com)(sn=app*))(userpassword=*)) [32mINFO [0m tests.tickets.ticket49122_test:ticket49122_test.py:57 Testing filter: (&(|(objectclass=person)(sn=app*))(nsrole=cn=empty,dc=example,dc=com)) [32mINFO [0m tests.tickets.ticket49122_test:ticket49122_test.py:57 Testing filter: (&(|(&(cn=*)(objectclass=person)(nsrole=cn=empty,dc=example,dc=com)))(uid=*)) [32mINFO [0m lib389:ticket49122_test.py:86 Test Passed | |||
Passed | tickets/ticket49180_test.py::test_ticket49180 | 51.65 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master3 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39003, 'ldap-secureport': 63703, 'server-id': 'master3', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master4 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39004, 'ldap-secureport': 63704, 'server-id': 'master4', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 915c8c88-b451-4bb9-83d0-664d2fbcbee6 / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 334b6d5b-fef3-47da-8624-4c5539a98b27 / got description=915c8c88-b451-4bb9-83d0-664d2fbcbee6) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master3 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect a0e7abb0-0b7e-4698-b699-a6143fc0eac3 / got description=334b6d5b-fef3-47da-8624-4c5539a98b27) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 3a6d6e3b-dec9-45b7-bcae-ade2190deffb / got description=a0e7abb0-0b7e-4698-b699-a6143fc0eac3) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master4 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 3ed17db3-dbce-431a-a815-9477d039e36e / got description=3a6d6e3b-dec9-45b7-bcae-ade2190deffb) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect ea89ff83-16d1-403f-a0af-18a373e91e86 / got description=3ed17db3-dbce-431a-a815-9477d039e36e) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... 
[32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master3 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master4 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master3 ... [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is was created [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master4 ... [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is was created [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master3 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master3 to master2 ... [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master3 to master4 ... [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is was created [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master4 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master4 to master2 ... [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master4 to master3 ... [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is was created -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket49180_test:ticket49180_test.py:78 Running test_ticket49180... 
[32mINFO [0m tests.tickets.ticket49180_test:ticket49180_test.py:80 Check that replication works properly on all masters [32mINFO [0m tests.tickets.ticket49180_test:ticket49180_test.py:95 test_clean: disable master 4... [32mINFO [0m tests.tickets.ticket49180_test:ticket49180_test.py:30 test_clean: remove all the agreements to master 4... [32mINFO [0m lib389:agreement.py:1095 Agreement (cn=004,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config) was successfully removed [32mINFO [0m lib389:agreement.py:1095 Agreement (cn=004,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config) was successfully removed [32mINFO [0m lib389:agreement.py:1095 Agreement (cn=004,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config) was successfully removed [32mINFO [0m tests.tickets.ticket49180_test:ticket49180_test.py:46 Restoring master 4... [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 9b24c7d0-9380-438e-b26a-42100745aa7c / got description=ea89ff83-16d1-403f-a0af-18a373e91e86) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: 
Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 759cfc5c-9ad2-44f0-8707-6997d0c03b30 / got description=9b24c7d0-9380-438e-b26a-42100745aa7c) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39004 already exists [32mINFO [0m tests.tickets.ticket49180_test:ticket49180_test.py:59 Replication is working m1 -> m2. [32mINFO [0m tests.tickets.ticket49180_test:ticket49180_test.py:59 Replication is working m1 -> m3. [32mINFO [0m tests.tickets.ticket49180_test:ticket49180_test.py:59 Replication is working m1 -> m4. [32mINFO [0m tests.tickets.ticket49180_test:ticket49180_test.py:67 Replication is working m4 -> m1. [32mINFO [0m tests.tickets.ticket49180_test:ticket49180_test.py:73 Master 4 has been successfully restored. [32mINFO [0m tests.tickets.ticket49180_test:ticket49180_test.py:106 Errors found on m1: 0 [32mINFO [0m tests.tickets.ticket49180_test:ticket49180_test.py:111 Errors found on m2: 0 [32mINFO [0m tests.tickets.ticket49180_test:ticket49180_test.py:116 Errors found on m3: 0 | |||
Passed | tickets/ticket49184_test.py::test_ticket49184 | 5.06 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket49184_test:ticket49184_test.py:89 create users and group... [32mINFO [0m tests.tickets.ticket49184_test:ticket49184_test.py:39 Adding members to the group... [32mINFO [0m tests.tickets.ticket49184_test:ticket49184_test.py:39 Adding members to the group... | |||
Passed | tickets/ticket49227_test.py::test_ticket49227 | 29.90 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | tickets/ticket49249_test.py::test_ticket49249 | 0.17 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | tickets/ticket49273_test.py::test_49273_corrupt_dbversion | 3.98 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | tickets/ticket49290_test.py::test_49290_range_unindexed_notes | 4.92 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. ------------------------------Captured stderr call------------------------------ [10/Nov/2020:00:28:02.862815859 -0500] - INFO - ldbm_instance_config_cachememsize_set - force a minimal value 512000 [10/Nov/2020:00:28:02.880670866 -0500] - INFO - bdb_instance_start - Import is running with nsslapd-db-private-import-mem on; No other process is allowed to access the database [10/Nov/2020:00:28:02.884993794 -0500] - INFO - check_and_set_import_cache - pagesize: 4096, available bytes 7466905600, process usage 22999040 [10/Nov/2020:00:28:02.889549081 -0500] - INFO - check_and_set_import_cache - Import allocates 2916760KB import cache. [10/Nov/2020:00:28:03.228968563 -0500] - INFO - bdb_db2index - userRoot: Indexing attribute: modifytimestamp [10/Nov/2020:00:28:03.235944506 -0500] - ERR - libdb - BDB1566 txn_checkpoint interface requires an environment configured for the transaction subsystem [10/Nov/2020:00:28:03.240848022 -0500] - ERR - bdb_force_checkpoint - Checkpoint FAILED, error Invalid argument (22) [10/Nov/2020:00:28:03.258522923 -0500] - INFO - bdb_db2index - userRoot: Finished indexing. [10/Nov/2020:00:28:03.291393662 -0500] - INFO - bdb_pre_close - All database threads now stopped | |||
Passed | tickets/ticket49386_test.py::test_ticket49386 | 33.22 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [31mCRITICAL[0m tests.tickets.ticket49386_test:ticket49386_test.py:28 Adding user (cn=user_0,ou=people,dc=example,dc=com): [31mCRITICAL[0m tests.tickets.ticket49386_test:ticket49386_test.py:28 Adding user (cn=user_1,ou=people,dc=example,dc=com): [31mCRITICAL[0m tests.tickets.ticket49386_test:ticket49386_test.py:28 Adding user (cn=user_2,ou=people,dc=example,dc=com): [31mCRITICAL[0m tests.tickets.ticket49386_test:ticket49386_test.py:28 Adding user (cn=user_3,ou=people,dc=example,dc=com): [31mCRITICAL[0m tests.tickets.ticket49386_test:ticket49386_test.py:28 Adding user (cn=user_4,ou=people,dc=example,dc=com): [31mCRITICAL[0m tests.tickets.ticket49386_test:ticket49386_test.py:28 Adding user (cn=user_5,ou=people,dc=example,dc=com): [31mCRITICAL[0m tests.tickets.ticket49386_test:ticket49386_test.py:28 Adding user (cn=user_6,ou=people,dc=example,dc=com): [31mCRITICAL[0m tests.tickets.ticket49386_test:ticket49386_test.py:28 Adding user (cn=user_7,ou=people,dc=example,dc=com): [31mCRITICAL[0m tests.tickets.ticket49386_test:ticket49386_test.py:28 Adding user (cn=user_8,ou=people,dc=example,dc=com): [31mCRITICAL[0m tests.tickets.ticket49386_test:ticket49386_test.py:28 Adding user (cn=user_9,ou=people,dc=example,dc=com): [32mINFO [0m lib389:ticket49386_test.py:65 !!!!!!! cn=user_1,ou=people,dc=example,dc=com: memberof->b'cn=group_1,ou=groups,dc=example,dc=com' [32mINFO [0m lib389:ticket49386_test.py:66 !!!!!!! 
b'cn=group_1,ou=groups,dc=example,dc=com' [32mINFO [0m lib389:ticket49386_test.py:67 !!!!!!! cn=group_1,ou=groups,dc=example,dc=com [32mINFO [0m tests.tickets.ticket49386_test:ticket49386_test.py:130 memberof log found: [10/Nov/2020:00:29:23.982632976 -0500] - DEBUG - memberof-plugin - memberof_postop_modrdn: Skip modrdn operation because src/dst identical cn=group_1,ou=groups,dc=example,dc=com | |||
Passed | tickets/ticket49441_test.py::test_ticket49441 | 8.22 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket49441_test:ticket49441_test.py:35 Position ldif files, and add indexes... [32mINFO [0m tests.tickets.ticket49441_test:ticket49441_test.py:52 Import LDIF with large indexed binary attributes... [31m[1mERROR [0m lib389:tasks.py:495 Error: import task import_11102020_003015 for file /var/lib/dirsrv/slapd-standalone1/ldifbinary.ldif exited with -23 [32mINFO [0m tests.tickets.ticket49441_test:ticket49441_test.py:61 Verify server is still running... [32mINFO [0m tests.tickets.ticket49441_test:ticket49441_test.py:68 Test PASSED | |||
Passed | tickets/ticket49460_test.py::test_ticket_49460 | 15.41 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master3 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39003, 'ldap-secureport': 63703, 'server-id': 'master3', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 73890abf-3cd3-41e1-98ce-55f150e3f820 / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 63c394aa-a00e-4fb0-93e5-d49244f81bb0 / got description=73890abf-3cd3-41e1-98ce-55f150e3f820) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master3 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect b241d1b6-958d-4c07-b99f-0018e499ab5d / got description=63c394aa-a00e-4fb0-93e5-d49244f81bb0) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 8435666b-53bf-468d-a534-7ed1fd96440c / got description=b241d1b6-958d-4c07-b99f-0018e499ab5d) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... 
[32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master3 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master3 ... [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is was created [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master3 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master3 to master2 ... 
[32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created -------------------------------Captured log call-------------------------------- [31mCRITICAL[0m tests.tickets.ticket49460_test:ticket49460_test.py:26 Adding user (cn=user11,ou=people,dc=example,dc=com): [31mCRITICAL[0m tests.tickets.ticket49460_test:ticket49460_test.py:26 Adding user (cn=user21,ou=people,dc=example,dc=com): [31mCRITICAL[0m tests.tickets.ticket49460_test:ticket49460_test.py:26 Adding user (cn=user31,ou=people,dc=example,dc=com): | |||
Passed | tickets/ticket49471_test.py::test_ticket49471 | 2.07 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [31mCRITICAL[0m tests.tickets.ticket49471_test:ticket49471_test.py:28 Adding user (cn=user_1,ou=people,dc=example,dc=com): | |||
Passed | tickets/ticket49540_test.py::test_ticket49540 | 23.34 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket49540_test:ticket49540_test.py:28 index homeDirectory [32mINFO [0m lib389:tasks.py:798 Index task index_attrs_11102020_003904 completed successfully [32mINFO [0m tests.tickets.ticket49540_test:ticket49540_test.py:62 check_task_status =========> 0 th loop [32mINFO [0m tests.tickets.ticket49540_test:ticket49540_test.py:71 cn=index_attrs_11102020_003904,cn=index,cn=tasks,cn=config ---> NO STATUS [32mINFO [0m tests.tickets.ticket49540_test:ticket49540_test.py:83 =========> Great it was expected in the middle of index [32mINFO [0m tests.tickets.ticket49540_test:ticket49540_test.py:62 check_task_status =========> 1 th loop [32mINFO [0m tests.tickets.ticket49540_test:ticket49540_test.py:71 cn=index_attrs_11102020_003904,cn=index,cn=tasks,cn=config ---> NO STATUS [32mINFO [0m tests.tickets.ticket49540_test:ticket49540_test.py:83 =========> Great it was expected in the middle of index [32mINFO [0m tests.tickets.ticket49540_test:ticket49540_test.py:62 check_task_status =========> 2 th loop [32mINFO [0m tests.tickets.ticket49540_test:ticket49540_test.py:71 cn=index_attrs_11102020_003904,cn=index,cn=tasks,cn=config ---> NO STATUS [32mINFO [0m tests.tickets.ticket49540_test:ticket49540_test.py:83 =========> Great it was expected in the middle of index [32mINFO [0m tests.tickets.ticket49540_test:ticket49540_test.py:62 check_task_status =========> 3 th loop [32mINFO [0m tests.tickets.ticket49540_test:ticket49540_test.py:71 
cn=index_attrs_11102020_003904,cn=index,cn=tasks,cn=config ---> NO STATUS [32mINFO [0m tests.tickets.ticket49540_test:ticket49540_test.py:83 =========> Great it was expected in the middle of index [32mINFO [0m tests.tickets.ticket49540_test:ticket49540_test.py:62 check_task_status =========> 4 th loop [32mINFO [0m tests.tickets.ticket49540_test:ticket49540_test.py:71 cn=index_attrs_11102020_003904,cn=index,cn=tasks,cn=config ---> NO STATUS [32mINFO [0m tests.tickets.ticket49540_test:ticket49540_test.py:83 =========> Great it was expected in the middle of index [32mINFO [0m tests.tickets.ticket49540_test:ticket49540_test.py:62 check_task_status =========> 5 th loop [32mINFO [0m tests.tickets.ticket49540_test:ticket49540_test.py:71 cn=index_attrs_11102020_003904,cn=index,cn=tasks,cn=config ---> NO STATUS [32mINFO [0m tests.tickets.ticket49540_test:ticket49540_test.py:83 =========> Great it was expected in the middle of index [32mINFO [0m tests.tickets.ticket49540_test:ticket49540_test.py:62 check_task_status =========> 6 th loop [32mINFO [0m tests.tickets.ticket49540_test:ticket49540_test.py:71 cn=index_attrs_11102020_003904,cn=index,cn=tasks,cn=config ---> NO STATUS [32mINFO [0m tests.tickets.ticket49540_test:ticket49540_test.py:83 =========> Great it was expected in the middle of index [32mINFO [0m tests.tickets.ticket49540_test:ticket49540_test.py:62 check_task_status =========> 7 th loop [32mINFO [0m tests.tickets.ticket49540_test:ticket49540_test.py:71 cn=index_attrs_11102020_003904,cn=index,cn=tasks,cn=config ---> NO STATUS [32mINFO [0m tests.tickets.ticket49540_test:ticket49540_test.py:83 =========> Great it was expected in the middle of index [32mINFO [0m tests.tickets.ticket49540_test:ticket49540_test.py:62 check_task_status =========> 8 th loop [32mINFO [0m tests.tickets.ticket49540_test:ticket49540_test.py:71 cn=index_attrs_11102020_003904,cn=index,cn=tasks,cn=config ---> NO STATUS [32mINFO [0m tests.tickets.ticket49540_test:ticket49540_test.py:83 
=========> Great it was expected in the middle of index [32mINFO [0m tests.tickets.ticket49540_test:ticket49540_test.py:62 check_task_status =========> 9 th loop [32mINFO [0m tests.tickets.ticket49540_test:ticket49540_test.py:68 cn=index_attrs_11102020_003904,cn=index,cn=tasks,cn=config ---> b'userRoot: Finished indexing.' | |||
Passed | tickets/ticket49623_2_test.py::test_modrdn_loop | 0.12 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket49623_2_test:ticket49623_2_test.py:64 Check the log messages for cenotaph error | |||
Passed | tickets/ticket49658_test.py::test_ticket49658_init | 6.96 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master2 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master3 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39003, 'ldap-secureport': 63703, 'server-id': 'master3', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:142 Creating replication topology. [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master2 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 59810b53-d5dc-4e1d-9a99-c73ee6b86f53 / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 8d0e0181-5650-4bf4-85a9-bd79a0efac68 / got description=59810b53-d5dc-4e1d-9a99-c73ee6b86f53) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 [32mINFO [0m lib389.topologies:topologies.py:156 Joining master master3 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is was created [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 7a877164-e08b-47a9-8336-7fedb5eab488 / got description=8d0e0181-5650-4bf4-85a9-bd79a0efac68) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 7a877164-e08b-47a9-8336-7fedb5eab488 / got description=8d0e0181-5650-4bf4-85a9-bd79a0efac68) [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 7a877164-e08b-47a9-8336-7fedb5eab488 / got description=8d0e0181-5650-4bf4-85a9-bd79a0efac68) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is working [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect f7024e0d-0649-467b-b658-b47ccd50ae8e / got 
description=7a877164-e08b-47a9-8336-7fedb5eab488) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 is working [32mINFO [0m lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master1 to master3 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... [32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master2 to master3 ... [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 is was created [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master3 to master1 ... 
[32mINFO [0m lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 already exists [32mINFO [0m lib389.topologies:topologies.py:164 Ensuring master master3 to master2 ... [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39002 is was created -------------------------------Captured log call-------------------------------- [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=0,ou=distinguished,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=0,ou=distinguished,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=1,ou=distinguished,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=1,ou=distinguished,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=2,ou=distinguished,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=2,ou=distinguished,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=3,ou=distinguished,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=3,ou=distinguished,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=4,ou=distinguished,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding 
employeeNumber=4,ou=distinguished,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=5,ou=distinguished,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=5,ou=distinguished,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=6,ou=distinguished,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=6,ou=distinguished,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=7,ou=distinguished,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=7,ou=distinguished,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=8,ou=distinguished,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=8,ou=distinguished,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=9,ou=distinguished,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=9,ou=distinguished,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=10,ou=distinguished,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=10,ou=distinguished,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=11,ou=distinguished,ou=people,dc=example,dc=com): [32mINFO [0m 
tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=11,ou=distinguished,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=12,ou=distinguished,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=12,ou=distinguished,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=13,ou=distinguished,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=13,ou=distinguished,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=14,ou=distinguished,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=14,ou=distinguished,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=15,ou=distinguished,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=15,ou=distinguished,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=16,ou=distinguished,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=16,ou=distinguished,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=17,ou=distinguished,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=17,ou=distinguished,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user 
(employeeNumber=18,ou=distinguished,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=18,ou=distinguished,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=19,ou=distinguished,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=19,ou=distinguished,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_0,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_0,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_1,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_1,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_2,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_2,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_3,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_3,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_4,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_4,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_5,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m 
tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_5,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_6,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_6,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_7,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_7,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_8,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_8,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_9,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_9,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_10,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_10,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_11,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_11,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_12,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_12,ou=regular,ou=people,dc=example,dc=com on M3 
[31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_13,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_13,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_14,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_14,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_15,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_15,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_16,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_16,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_17,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_17,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_18,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_18,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_19,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_19,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user 
(uid=user_20,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_20,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_21,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_21,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_22,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_22,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_23,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_23,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_24,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_24,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_25,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_25,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_26,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_26,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_27,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 
Adding uid=user_27,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_28,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_28,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_29,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_29,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_30,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_30,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_31,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_31,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_32,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_32,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_33,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_33,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_34,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_34,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m 
tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_35,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_35,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_36,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_36,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_37,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_37,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_38,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_38,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_39,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_39,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_40,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_40,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_41,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_41,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_42,ou=regular,ou=people,dc=example,dc=com): 
[32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_42,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_43,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_43,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_44,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_44,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_45,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_45,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_46,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_46,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_47,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_47,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_48,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_48,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_49,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding 
uid=user_49,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_50,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_50,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_51,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_51,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_52,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_52,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_53,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_53,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_54,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_54,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_55,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_55,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_56,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_56,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m 
tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_57,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_57,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_58,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_58,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_59,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_59,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_60,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_60,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_61,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_61,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_62,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_62,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_63,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_63,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_64,ou=regular,ou=people,dc=example,dc=com): 
[32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_64,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_65,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_65,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_66,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_66,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_67,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_67,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_68,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_68,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_69,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_69,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_70,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_70,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_71,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding 
uid=user_71,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_72,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_72,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_73,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_73,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_74,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_74,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_75,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_75,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_76,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_76,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_77,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_77,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_78,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_78,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m 
tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_79,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_79,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_80,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_80,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_81,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_81,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_82,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_82,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_83,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_83,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_84,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_84,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_85,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_85,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_86,ou=regular,ou=people,dc=example,dc=com): 
[32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_86,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_87,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_87,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_88,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_88,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_89,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_89,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_90,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_90,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_91,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_91,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_92,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_92,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_93,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding 
uid=user_93,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_94,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_94,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_95,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_95,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_96,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_96,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_97,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_97,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_98,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_98,ou=regular,ou=people,dc=example,dc=com on M3 [31mCRITICAL[0m tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_99,ou=regular,ou=people,dc=example,dc=com): [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_99,ou=regular,ou=people,dc=example,dc=com on M3 | |||
Passed | tickets/ticket49658_test.py::test_ticket49658_0 | 33.62 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:289 Search M1 employeeNumber=b'0' (vs. b'0') [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:295 Search M2 employeeNumber=b'0' (vs. b'0') [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:303 Search M3 employeeNumber=b'0' (vs. b'0') | |||
Passed | tickets/ticket49658_test.py::test_ticket49658_1 | 33.63 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:395 Search M1 employeeNumber=b'1' (vs. b'1') [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:401 Search M2 employeeNumber=b'1' (vs. b'1') [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:409 Search M3 employeeNumber=b'1' (vs. b'1') | |||
Passed | tickets/ticket49658_test.py::test_ticket49658_2 | 33.69 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:501 Search M1 employeeNumber=b'2' (vs. b'2') [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:507 Search M2 employeeNumber=b'2' (vs. b'2') [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:515 Search M3 employeeNumber=b'2' (vs. b'2') | |||
Passed | tickets/ticket49658_test.py::test_ticket49658_3 | 33.64 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:607 Search M1 employeeNumber=b'3' (vs. b'3') [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:613 Search M2 employeeNumber=b'3' (vs. b'3') [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:621 Search M3 employeeNumber=b'3' (vs. b'3') | |||
Passed | tickets/ticket49658_test.py::test_ticket49658_4 | 33.62 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:713 Search M1 employeeNumber=b'4' (vs. b'4') [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:719 Search M2 employeeNumber=b'4' (vs. b'4') [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:727 Search M3 employeeNumber=b'4' (vs. b'4') | |||
Passed | tickets/ticket49658_test.py::test_ticket49658_5 | 33.63 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:819 Search M1 employeeNumber=b'5' (vs. b'5') [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:825 Search M2 employeeNumber=b'5' (vs. b'5') [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:833 Search M3 employeeNumber=b'5' (vs. b'5') | |||
Passed | tickets/ticket49658_test.py::test_ticket49658_6 | 33.62 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:934 Search M1 employeeNumber=b'6.2' (vs. 6.2) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:940 Search M2 employeeNumber=b'6.2' (vs. 6.2) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:948 Search M3 employeeNumber=b'6.2' (vs. 6.2) | |||
Passed | tickets/ticket49658_test.py::test_ticket49658_7 | 33.66 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:1042 Search M1 employeeNumber=b'7.2' (vs. 7.2) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:1048 Search M2 employeeNumber=b'7.2' (vs. 7.2) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:1056 Search M3 employeeNumber=b'7.2' (vs. 7.2) | |||
Passed | tickets/ticket49658_test.py::test_ticket49658_8 | 33.67 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:1150 Search M1 employeeNumber=b'8.2' (vs. 8.2) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:1156 Search M2 employeeNumber=b'8.2' (vs. 8.2) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:1164 Search M3 employeeNumber=b'8.2' (vs. 8.2) | |||
Passed | tickets/ticket49658_test.py::test_ticket49658_9 | 33.69 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:1259 Search M1 employeeNumber=b'9.2' (vs. 9.2) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:1265 Search M2 employeeNumber=b'9.2' (vs. 9.2) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:1273 Search M3 employeeNumber=b'9.2' (vs. 9.2) | |||
Passed | tickets/ticket49658_test.py::test_ticket49658_10 | 33.62 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:1369 Search M1 employeeNumber=b'10.2' (vs. 10.2) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:1375 Search M2 employeeNumber=b'10.2' (vs. 10.2) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:1383 Search M3 employeeNumber=b'10.2' (vs. 10.2) | |||
Passed | tickets/ticket49658_test.py::test_ticket49658_11 | 33.63 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:1479 Search M1 employeeNumber=b'11.1' (vs. 11.1) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:1485 Search M2 employeeNumber=b'11.1' (vs. 11.1) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:1493 Search M3 employeeNumber=b'11.1' (vs. 11.1) | |||
Passed | tickets/ticket49658_test.py::test_ticket49658_12 | 33.71 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:1594 Search M1 employeeNumber=b'12.1' (vs. 12.1) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:1600 Search M2 employeeNumber=b'12.1' (vs. 12.1) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:1608 Search M3 employeeNumber=b'12.1' (vs. 12.1) | |||
Passed | tickets/ticket49658_test.py::test_ticket49658_13 | 33.57 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:1709 Search M1 employeeNumber=b'13.1' (vs. 13.1) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:1715 Search M2 employeeNumber=b'13.1' (vs. 13.1) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:1723 Search M3 employeeNumber=b'13.1' (vs. 13.1) | |||
Passed | tickets/ticket49658_test.py::test_ticket49658_14 | 33.70 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:1825 Search M1 employeeNumber=b'14.1' (vs. 14.1) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:1831 Search M2 employeeNumber=b'14.1' (vs. 14.1) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:1839 Search M3 employeeNumber=b'14.1' (vs. 14.1) | |||
Passed | tickets/ticket49658_test.py::test_ticket49658_15 | 33.62 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:1940 Search M1 employeeNumber=b'15.1' (vs. 15.1) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:1946 Search M2 employeeNumber=b'15.1' (vs. 15.1) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:1954 Search M3 employeeNumber=b'15.1' (vs. 15.1) | |||
Passed | tickets/ticket49658_test.py::test_ticket49658_16 | 37.60 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:2076 Search M1 employeeNumber=b'1.1' (vs. 1.1) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:2082 Search M2 employeeNumber=b'1.1' (vs. 1.1) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:2090 Search M3 employeeNumber=b'1.1' (vs. 1.1) | |||
Passed | tickets/ticket49658_test.py::test_ticket49658_17 | 37.66 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:2183 Search M1 employeeNumber=b'2.2' (vs. 2.2) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:2189 Search M2 employeeNumber=b'2.2' (vs. 2.2) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:2197 Search M3 employeeNumber=b'2.2' (vs. 2.2) | |||
Passed | tickets/ticket49658_test.py::test_ticket49658_18 | 37.71 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:2289 Search M1 employeeNumber=b'3.2' (vs. 3.2) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:2295 Search M2 employeeNumber=b'3.2' (vs. 3.2) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:2303 Search M3 employeeNumber=b'3.2' (vs. 3.2) | |||
Passed | tickets/ticket49658_test.py::test_ticket49658_19 | 38.78 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:2405 Search M1 employeeNumber=b'4.1' (vs. 4.1) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:2411 Search M2 employeeNumber=b'4.1' (vs. 4.1) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:2419 Search M3 employeeNumber=b'4.1' (vs. 4.1) | |||
Passed | tickets/ticket49658_test.py::test_ticket49658_20 | 38.73 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:2520 Search M1 employeeNumber=b'5.1' (vs. 5.1) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:2526 Search M2 employeeNumber=b'5.1' (vs. 5.1) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:2534 Search M3 employeeNumber=b'5.1' (vs. 5.1) | |||
Passed | tickets/ticket49658_test.py::test_ticket49658_21 | 38.72 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:2635 Search M1 employeeNumber=b'6.1' (vs. 6.1) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:2641 Search M2 employeeNumber=b'6.1' (vs. 6.1) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:2649 Search M3 employeeNumber=b'6.1' (vs. 6.1) | |||
Passed | tickets/ticket49658_test.py::test_ticket49658_22 | 38.70 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:2750 Search M1 employeeNumber=b'7.1' (vs. 7.1) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:2756 Search M2 employeeNumber=b'7.1' (vs. 7.1) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:2764 Search M3 employeeNumber=b'7.1' (vs. 7.1) | |||
Passed | tickets/ticket49658_test.py::test_ticket49658_23 | 39.76 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:2873 Search M1 employeeNumber=b'8.2' (vs. 8.2) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:2879 Search M2 employeeNumber=b'8.2' (vs. 8.2) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:2887 Search M3 employeeNumber=b'8.2' (vs. 8.2) | |||
Passed | tickets/ticket49658_test.py::test_ticket49658_24 | 39.82 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:2996 Search M1 employeeNumber=b'9.2' (vs. 9.2) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:3002 Search M2 employeeNumber=b'9.2' (vs. 9.2) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:3010 Search M3 employeeNumber=b'9.2' (vs. 9.2) | |||
Passed | tickets/ticket49658_test.py::test_ticket49658_25 | 39.82 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:3119 Search M1 employeeNumber=b'10.2' (vs. 10.2) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:3125 Search M2 employeeNumber=b'10.2' (vs. 10.2) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:3133 Search M3 employeeNumber=b'10.2' (vs. 10.2) | |||
Passed | tickets/ticket49658_test.py::test_ticket49658_26 | 39.73 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:3242 Search M1 employeeNumber=b'11.2' (vs. 11.2) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:3248 Search M2 employeeNumber=b'11.2' (vs. 11.2) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:3256 Search M3 employeeNumber=b'11.2' (vs. 11.2) | |||
Passed | tickets/ticket49658_test.py::test_ticket49658_27 | 39.77 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:3365 Search M1 employeeNumber=b'12.2' (vs. 12.2) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:3371 Search M2 employeeNumber=b'12.2' (vs. 12.2) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:3379 Search M3 employeeNumber=b'12.2' (vs. 12.2) | |||
Passed | tickets/ticket49658_test.py::test_ticket49658_28 | 39.71 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:3488 Search M1 employeeNumber=b'13.2' (vs. 13.2) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:3494 Search M2 employeeNumber=b'13.2' (vs. 13.2) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:3502 Search M3 employeeNumber=b'13.2' (vs. 13.2) | |||
Passed | tickets/ticket49658_test.py::test_ticket49658_29 | 40.04 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:3612 Search M1 employeeNumber=b'14.2' (vs. 14.2) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:3618 Search M2 employeeNumber=b'14.2' (vs. 14.2) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:3626 Search M3 employeeNumber=b'14.2' (vs. 14.2) | |||
Passed | tickets/ticket49658_test.py::test_ticket49658_30 | 39.97 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:3735 Search M1 employeeNumber=b'15.2' (vs. 15.2) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:3741 Search M2 employeeNumber=b'15.2' (vs. 15.2) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:3749 Search M3 employeeNumber=b'15.2' (vs. 15.2) | |||
Passed | tickets/ticket49658_test.py::test_ticket49658_31 | 40.73 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:3866 Search M1 employeeNumber=b'16.1' (vs. 16.1) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:3872 Search M2 employeeNumber=b'16.1' (vs. 16.1) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:3880 Search M3 employeeNumber=b'16.1' (vs. 16.1) | |||
Passed | tickets/ticket49658_test.py::test_ticket49658_32 | 40.75 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:3998 Search M1 employeeNumber=b'17.1' (vs. 17.1) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:4004 Search M2 employeeNumber=b'17.1' (vs. 17.1) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:4012 Search M3 employeeNumber=b'17.1' (vs. 17.1) | |||
Passed | tickets/ticket49658_test.py::test_ticket49658_33 | 40.61 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:4122 Search M1 employeeNumber=b'18.1' (vs. 18.1) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:4128 Search M2 employeeNumber=b'18.1' (vs. 18.1) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:4136 Search M3 employeeNumber=b'18.1' (vs. 18.1) | |||
Passed | tickets/ticket49658_test.py::test_ticket49658_34 | 40.76 | |
-------------------------------Captured log call-------------------------------- [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:4246 Search M1 employeeNumber=b'19.1' (vs. 19.1) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:4252 Search M2 employeeNumber=b'19.1' (vs. 19.1) [32mINFO [0m tests.tickets.ticket49658_test:ticket49658_test.py:4260 Search M3 employeeNumber=b'19.1' (vs. 19.1) | |||
Passed | tickets/ticket49788_test.py::test_ticket49781 | 0.15 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | tickets/ticket50078_test.py::test_ticket50078 | 4.45 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for master1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for hub1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39101, 'ldap-secureport': 63801, 'server-id': 'hub1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for consumer1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39201, 'ldap-secureport': 63901, 'server-id': 'consumer1', 'suffix': 'dc=example,dc=com'} was created. [32mINFO [0m lib389.topologies:topologies.py:524 Creating replication topology. 
[32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39101 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39101 is was created [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39101 is NOT working (expect 9146feec-fbbc-4012-a156-f982df332cba / got description=None) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39101 is working [32mINFO [0m lib389.replica:replica.py:2211 SUCCESS: joined consumer from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39101 [32mINFO [0m lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 completed [32mINFO [0m lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39101 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is was created [32mINFO [0m lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39101 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 [32mINFO [0m lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 4ba8e2a8-cda9-4691-89f3-1d406e1eca8a / got description=9146feec-fbbc-4012-a156-f982df332cba) [32mINFO [0m lib389.replica:replica.py:2496 SUCCESS: Replication from 
ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-53.hosted.upshift.rdu2.redhat.com:39201 is working -------------------------------Captured log call-------------------------------- [32mINFO [0m tests.tickets.ticket50078_test:ticket50078_test.py:31 Replication is working. [32mINFO [0m tests.tickets.ticket50078_test:ticket50078_test.py:47 Rename the test entry test_user1... [32mINFO [0m tests.tickets.ticket50078_test:ticket50078_test.py:52 Replication is working. | |||
Passed | tickets/ticket50234_test.py::test_ticket50234 | 0.21 | |
-------------------------------Captured log setup------------------------------- [32mINFO [0m lib389.SetupDs:setup.py:658 Starting installation... [32mINFO [0m lib389.SetupDs:setup.py:686 Completed installation for standalone1 [32mINFO [0m lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. |