report.html

Report generated on 29-Oct-2020 at 01:02:07 by pytest-html v2.1.1

Environment

389-ds-base 1.4.5.0-20201029git5c25c06.fc32
Packages {"pluggy": "0.13.1", "py": "1.9.0", "pytest": "5.4.3"}
Platform Linux-5.7.7-200.fc32.x86_64-x86_64-with-glibc2.2.5
Plugins {"html": "2.1.1", "libfaketime": "0.1.2", "metadata": "1.10.0"}
Python 3.8.6
cyrus-sasl 2.1.27-4.fc32
nspr 4.29.0-1.fc32
nss 3.57.0-1.fc32
openldap 2.4.47-5.fc32

Summary

2064 tests ran in 17188.18 seconds.

1974 passed, 21 skipped, 62 failed, 4 errors, 20 expected failures, 8 unexpected passes

Results

Result Test Duration Links
Error tickets/ticket48973_test.py::test_ticket48973_init::setup 1.11
request = <SubRequest 'topology' for <Function test_ticket48973_init>>

@pytest.fixture(scope="module")
def topology(request):
# Creating standalone instance ...
standalone = DirSrv(verbose=False)
args_instance[SER_HOST] = HOST_STANDALONE
args_instance[SER_PORT] = PORT_STANDALONE
args_instance[SER_SERVERID_PROP] = SERVERID_STANDALONE
args_instance[SER_CREATION_SUFFIX] = DEFAULT_SUFFIX
args_standalone = args_instance.copy()
standalone.allocate(args_standalone)
instance_standalone = standalone.exists()
if instance_standalone:
standalone.delete()
> standalone.create()

/export/tests/tickets/ticket48973_test.py:52:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:838: in create
self._createDirsrv(version)
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:808: in _createDirsrv
sds.create_from_args(general, slapd, backends, None)
/usr/local/lib/python3.8/site-packages/lib389/instance/setup.py:663: in create_from_args
self._prepare_ds(general, slapd, backends)
/usr/local/lib/python3.8/site-packages/lib389/instance/setup.py:594: in _prepare_ds
assert_c(slapd['root_dn'] is not None, "Configuration root_dn in section [slapd] not found")
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

condition = False, msg = 'Configuration root_dn in section [slapd] not found'

def assert_c(condition, msg="Assertion Failed"):
"""This is the same as assert, but assert is compiled out
when optimisation is enabled. This prevents compiling out.
"""
if not condition:
> raise AssertionError(msg)
E AssertionError: Configuration root_dn in section [slapd] not found

/usr/local/lib/python3.8/site-packages/lib389/utils.py:1243: AssertionError
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation...
Error tickets/ticket48973_test.py::test_ticket48973_ces_not_indexed::setup 0.00
request = <SubRequest 'topology' for <Function test_ticket48973_init>>

@pytest.fixture(scope="module")
def topology(request):
# Creating standalone instance ...
standalone = DirSrv(verbose=False)
args_instance[SER_HOST] = HOST_STANDALONE
args_instance[SER_PORT] = PORT_STANDALONE
args_instance[SER_SERVERID_PROP] = SERVERID_STANDALONE
args_instance[SER_CREATION_SUFFIX] = DEFAULT_SUFFIX
args_standalone = args_instance.copy()
standalone.allocate(args_standalone)
instance_standalone = standalone.exists()
if instance_standalone:
standalone.delete()
> standalone.create()

/export/tests/tickets/ticket48973_test.py:52:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:838: in create
self._createDirsrv(version)
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:808: in _createDirsrv
sds.create_from_args(general, slapd, backends, None)
/usr/local/lib/python3.8/site-packages/lib389/instance/setup.py:663: in create_from_args
self._prepare_ds(general, slapd, backends)
/usr/local/lib/python3.8/site-packages/lib389/instance/setup.py:594: in _prepare_ds
assert_c(slapd['root_dn'] is not None, "Configuration root_dn in section [slapd] not found")
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

condition = False, msg = 'Configuration root_dn in section [slapd] not found'

def assert_c(condition, msg="Assertion Failed"):
"""This is the same as assert, but assert is compiled out
when optimisation is enabled. This prevents compiling out.
"""
if not condition:
> raise AssertionError(msg)
E AssertionError: Configuration root_dn in section [slapd] not found

/usr/local/lib/python3.8/site-packages/lib389/utils.py:1243: AssertionError
Error tickets/ticket48973_test.py::test_ticket48973_homeDirectory_indexing::setup 0.00
request = <SubRequest 'topology' for <Function test_ticket48973_init>>

@pytest.fixture(scope="module")
def topology(request):
# Creating standalone instance ...
standalone = DirSrv(verbose=False)
args_instance[SER_HOST] = HOST_STANDALONE
args_instance[SER_PORT] = PORT_STANDALONE
args_instance[SER_SERVERID_PROP] = SERVERID_STANDALONE
args_instance[SER_CREATION_SUFFIX] = DEFAULT_SUFFIX
args_standalone = args_instance.copy()
standalone.allocate(args_standalone)
instance_standalone = standalone.exists()
if instance_standalone:
standalone.delete()
> standalone.create()

/export/tests/tickets/ticket48973_test.py:52:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:838: in create
self._createDirsrv(version)
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:808: in _createDirsrv
sds.create_from_args(general, slapd, backends, None)
/usr/local/lib/python3.8/site-packages/lib389/instance/setup.py:663: in create_from_args
self._prepare_ds(general, slapd, backends)
/usr/local/lib/python3.8/site-packages/lib389/instance/setup.py:594: in _prepare_ds
assert_c(slapd['root_dn'] is not None, "Configuration root_dn in section [slapd] not found")
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

condition = False, msg = 'Configuration root_dn in section [slapd] not found'

def assert_c(condition, msg="Assertion Failed"):
"""This is the same as assert, but assert is compiled out
when optimisation is enabled. This prevents compiling out.
"""
if not condition:
> raise AssertionError(msg)
E AssertionError: Configuration root_dn in section [slapd] not found

/usr/local/lib/python3.8/site-packages/lib389/utils.py:1243: AssertionError
Error tickets/ticket48973_test.py::test_ticket48973_homeDirectory_caseExactIA5Match_caseIgnoreIA5Match_indexing::setup 0.00
request = <SubRequest 'topology' for <Function test_ticket48973_init>>

@pytest.fixture(scope="module")
def topology(request):
# Creating standalone instance ...
standalone = DirSrv(verbose=False)
args_instance[SER_HOST] = HOST_STANDALONE
args_instance[SER_PORT] = PORT_STANDALONE
args_instance[SER_SERVERID_PROP] = SERVERID_STANDALONE
args_instance[SER_CREATION_SUFFIX] = DEFAULT_SUFFIX
args_standalone = args_instance.copy()
standalone.allocate(args_standalone)
instance_standalone = standalone.exists()
if instance_standalone:
standalone.delete()
> standalone.create()

/export/tests/tickets/ticket48973_test.py:52:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:838: in create
self._createDirsrv(version)
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:808: in _createDirsrv
sds.create_from_args(general, slapd, backends, None)
/usr/local/lib/python3.8/site-packages/lib389/instance/setup.py:663: in create_from_args
self._prepare_ds(general, slapd, backends)
/usr/local/lib/python3.8/site-packages/lib389/instance/setup.py:594: in _prepare_ds
assert_c(slapd['root_dn'] is not None, "Configuration root_dn in section [slapd] not found")
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

condition = False, msg = 'Configuration root_dn in section [slapd] not found'

def assert_c(condition, msg="Assertion Failed"):
"""This is the same as assert, but assert is compiled out
when optimisation is enabled. This prevents compiling out.
"""
if not condition:
> raise AssertionError(msg)
E AssertionError: Configuration root_dn in section [slapd] not found

/usr/local/lib/python3.8/site-packages/lib389/utils.py:1243: AssertionError
Failed suites/acl/keywords_part2_test.py::test_access_from_certain_network_only_ip 3.85
topo = <lib389.topologies.TopologyMain object at 0x7fd179ca8a30>
add_user = None, aci_of_user = None

def test_access_from_certain_network_only_ip(topo, add_user, aci_of_user):
"""
User can access the data when connecting from a certain network only as per the ACI.

:id: 4ec38296-7ac5-11e8-9816-8c16451d917b
:setup: Standalone Server
:steps:
1. Add test entry
2. Add ACI
3. User should follow ACI role
:expectedresults:
1. Entry should be added
2. Operation should succeed
3. Operation should succeed
"""
# Turn access log buffering off to make less time consuming
topo.standalone.config.set('nsslapd-accesslog-logbuffering', 'off')

# Find the ip from ds logs , as we need to know the exact ip used by ds to run the instances.
# Wait till Access Log is generated
topo.standalone.restart()

# Add ACI
domain = Domain(topo.standalone, DEFAULT_SUFFIX)
domain.add("aci", f'(target = "ldap:///{IP_OU_KEY}")(targetattr=\"*\")(version 3.0; aci "IP aci"; '
f'allow(all)userdn = "ldap:///{NETSCAPEIP_KEY}" and ip = "::1" ;)')

# create a new connection for the test
conn = UserAccount(topo.standalone, NETSCAPEIP_KEY).bind(PW_DM)
# Perform Operation
org = OrganizationalUnit(conn, IP_OU_KEY)
> org.replace("seeAlso", "cn=1")

suites/acl/keywords_part2_test.py:76:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/usr/local/lib/python3.8/site-packages/lib389/_mapped_object.py:280: in replace
self.set(key, value, action=ldap.MOD_REPLACE)
/usr/local/lib/python3.8/site-packages/lib389/_mapped_object.py:446: in set
return self._instance.modify_ext_s(self._dn, [(action, key, value)],
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:613: in modify_ext_s
resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout)
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:764: in result3
resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4(
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:774: in result4
ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop)
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:340: in _ldap_call
reraise(exc_type, exc_value, exc_traceback)
/usr/local/lib64/python3.8/site-packages/ldap/compat.py:46: in reraise
raise exc_value
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd179ca8880>
func = <built-in method result4 of LDAP object at 0x7fd179c8e120>
args = (3, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None
exc_type = None, exc_value = None, exc_traceback = None

def _ldap_call(self,func,*args,**kwargs):
"""
Wrapper method mainly for serializing calls into OpenLDAP libs
and trace logs
"""
self._ldap_object_lock.acquire()
if __debug__:
if self._trace_level>=1:
self._trace_file.write('*** %s %s - %s\n%s\n' % (
repr(self),
self._uri,
'.'.join((self.__class__.__name__,func.__name__)),
pprint.pformat((args,kwargs))
))
if self._trace_level>=9:
traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file)
diagnostic_message_success = None
try:
try:
> result = func(*args,**kwargs)
E ldap.INSUFFICIENT_ACCESS: {'msgtype': 103, 'msgid': 3, 'result': 50, 'desc': 'Insufficient access', 'ctrls': [], 'info': "Insufficient 'write' privilege to the 'seeAlso' attribute of entry 'ou=ip,ou=keywords,dc=example,dc=com'.\n"}

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:324: INSUFFICIENT_ACCESS
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Failed suites/acl/keywords_part2_test.py::test_connectin_from_an_unauthorized_network 0.31
topo = <lib389.topologies.TopologyMain object at 0x7fd179ca8a30>
add_user = None, aci_of_user = None

def test_connectin_from_an_unauthorized_network(topo, add_user, aci_of_user):
"""
User cannot access the data when connecting from an unauthorized network as per the ACI.

:id: 52d1ecce-7ac5-11e8-9ad9-8c16451d917b
:setup: Standalone Server
:steps:
1. Add test entry
2. Add ACI
3. User should follow ACI role
:expectedresults:
1. Entry should be added
2. Operation should succeed
3. Operation should succeed
"""

# Add ACI
domain = Domain(topo.standalone, DEFAULT_SUFFIX)
domain.add("aci", f'(target = "ldap:///{IP_OU_KEY}")'
f'(targetattr="*")(version 3.0; aci "IP aci"; '
f'allow(all) userdn = "ldap:///{NETSCAPEIP_KEY}" '
f'and ip != "::1" ;)')

# create a new connection for the test
conn = UserAccount(topo.standalone, NETSCAPEIP_KEY).bind(PW_DM)
# Perform Operation
org = OrganizationalUnit(conn, IP_OU_KEY)
with pytest.raises(ldap.INSUFFICIENT_ACCESS):
> org.replace("seeAlso", "cn=1")
E Failed: DID NOT RAISE <class 'ldap.INSUFFICIENT_ACCESS'>

suites/acl/keywords_part2_test.py:119: Failed
Failed suites/clu/repl_monitor_test.py::test_dsconf_replication_monitor 0.82
topology_m2 = <lib389.topologies.TopologyMain object at 0x7fd17822cfd0>
set_log_file = None

@pytest.mark.ds50545
@pytest.mark.bz1739718
@pytest.mark.skipif(ds_is_older("1.4.0"), reason="Not implemented")
def test_dsconf_replication_monitor(topology_m2, set_log_file):
"""Test replication monitor that was ported from legacy tools

:id: ce48020d-7c30-41b7-8f68-144c9cd757f6
:setup: 2 MM topology
:steps:
1. Create DS instance
2. Run replication monitor with connections option
3. Run replication monitor with aliases option
4. Run replication monitor with --json option
5. Run replication monitor with .dsrc file created
:expectedresults:
1. Success
2. Success
3. Success
4. Success
5. Success
"""

m1 = topology_m2.ms["master1"]
m2 = topology_m2.ms["master2"]

alias_content = ['Supplier: M1 (' + m1.host + ':' + str(m1.port) + ')',
'Supplier: M2 (' + m2.host + ':' + str(m2.port) + ')']

connection_content = 'Supplier: '+ m1.host + ':' + str(m1.port)
content_list = ['Replica Root: dc=example,dc=com',
'Replica ID: 1',
'Replica Status: Available',
'Max CSN',
'Status For Agreement: "002" ('+ m2.host + ':' + str(m2.port) + ')',
'Replica Enabled: on',
'Update In Progress: FALSE',
'Last Update Start:',
'Last Update End:',
'Number Of Changes Sent:',
'Number Of Changes Skipped: None',
'Last Update Status: Error (0) Replica acquired successfully: Incremental update succeeded',
'Last Init Start:',
'Last Init End:',
'Last Init Status:',
'Reap Active: 0',
'Replication Status: In Synchronization',
'Replication Lag Time:',
'Supplier: ',
m2.host + ':' + str(m2.port),
'Replica Root: dc=example,dc=com',
'Replica ID: 2',
'Status For Agreement: "001" (' + m1.host + ':' + str(m1.port)+')']

json_list = ['type',
'list',
'items',
'name',
m1.host + ':' + str(m1.port),
'data',
'"replica_id": "1"',
'"replica_root": "dc=example,dc=com"',
'"replica_status": "Available"',
'maxcsn',
'agmts_status',
'agmt-name',
'002',
'replica',
m2.host + ':' + str(m2.port),
'replica-enabled',
'update-in-progress',
'last-update-start',
'last-update-end',
'number-changes-sent',
'number-changes-skipped',
'last-update-status',
'Error (0) Replica acquired successfully: Incremental update succeeded',
'last-init-start',
'last-init-end',
'last-init-status',
'reap-active',
'replication-status',
'In Synchronization',
'replication-lag-time',
'"replica_id": "2"',
'001',
m1.host + ':' + str(m1.port)]

dsrc_content = '[repl-monitor-connections]\n' \
'connection1 = ' + m1.host + ':' + str(m1.port) + ':' + DN_DM + ':' + PW_DM + '\n' \
'connection2 = ' + m2.host + ':' + str(m2.port) + ':' + DN_DM + ':' + PW_DM + '\n' \
'\n' \
'[repl-monitor-aliases]\n' \
'M1 = ' + m1.host + ':' + str(m1.port) + '\n' \
'M2 = ' + m2.host + ':' + str(m2.port)

connections = [m1.host + ':' + str(m1.port) + ':' + DN_DM + ':' + PW_DM,
m2.host + ':' + str(m2.port) + ':' + DN_DM + ':' + PW_DM]

aliases = ['M1=' + m1.host + ':' + str(m1.port),
'M2=' + m2.host + ':' + str(m2.port)]

args = FakeArgs()
args.connections = connections
args.aliases = None
args.json = False

log.info('Run replication monitor with connections option')
get_repl_monitor_info(m1, DEFAULT_SUFFIX, log, args)
check_value_in_log_and_reset(content_list, connection_content)

log.info('Run replication monitor with aliases option')
args.aliases = aliases
get_repl_monitor_info(m1, DEFAULT_SUFFIX, log, args)
> check_value_in_log_and_reset(content_list, alias_content)

suites/clu/repl_monitor_test.py:177:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

content_list = ['Replica Root: dc=example,dc=com', 'Replica ID: 1', 'Replica Status: Available', 'Max CSN', 'Status For Agreement: "002" (ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002)', 'Replica Enabled: on', ...]
second_list = ['Supplier: M1 (ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001)', 'Supplier: M2 (ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002)']
single_value = None

def check_value_in_log_and_reset(content_list, second_list=None, single_value=None):
with open(LOG_FILE, 'r+') as f:
file_content = f.read()

for item in content_list:
log.info('Check that "{}" is present'.format(item))
assert item in file_content

if second_list is not None:
log.info('Check for "{}"'.format(second_list))
for item in second_list:
> assert item in file_content
E AssertionError: assert 'Supplier: M1 (ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001)' in 'Run replication monitor with aliases option\ndsrc path: /root/.dsrc\ndsrc container path: /data/config/container.inf\...t Init Status: unavailable\nReap Active: 0\nReplication Status: In Synchronization\nReplication Lag Time: 00:00:00\n\n'

suites/clu/repl_monitor_test.py:54: AssertionError
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 032e47fe-b3eb-4599-83db-e582949880aa / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working 
(expect 90d4dd3e-5bae-43fa-b896-dcb774cfc7cd / got description=032e47fe-b3eb-4599-83db-e582949880aa) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists
-------------------------------Captured log call--------------------------------
INFO  tests.suites.clu.repl_monitor_test:repl_monitor_test.py:170 Run replication monitor with connections option DEBUG  tests.suites.clu.repl_monitor_test:dsrc.py:76 dsrc path: /root/.dsrc DEBUG  tests.suites.clu.repl_monitor_test:dsrc.py:77 dsrc container path: /data/config/container.inf DEBUG  tests.suites.clu.repl_monitor_test:dsrc.py:85 dsrc instances: [] DEBUG  tests.suites.clu.repl_monitor_test:dsrc.py:210 dsrc completed with {'connections': None, 'aliases': None} INFO  tests.suites.clu.repl_monitor_test:replication.py:438 Supplier: localhost.localdomain:39001 INFO  tests.suites.clu.repl_monitor_test:replication.py:443 ------------------------------------- INFO  tests.suites.clu.repl_monitor_test:replication.py:455 Replica Root: dc=example,dc=com INFO  tests.suites.clu.repl_monitor_test:replication.py:456 Replica ID: 1 INFO  tests.suites.clu.repl_monitor_test:replication.py:457 Replica Status: Available INFO  tests.suites.clu.repl_monitor_test:replication.py:458 Max CSN: 5f9a0f1b000000010000 INFO  tests.suites.clu.repl_monitor_test:replication.py:461 Status For Agreement: "002" (ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002) Replica Enabled: on Update In Progress: FALSE Last Update Start: 20201029003852Z Last Update End: 20201029003852Z Number Of Changes Sent: 1:2/0 Number Of Changes Skipped: None Last Update Status: Error (0) Replica acquired successfully: Incremental update succeeded Last Init Start: 19700101000000Z Last Init End: 19700101000000Z Last Init Status: unavailable Reap Active: 0 Replication Status: In Synchronization Replication Lag Time: 00:00:00 INFO  tests.suites.clu.repl_monitor_test:replication.py:438 Supplier: ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  tests.suites.clu.repl_monitor_test:replication.py:443 ----------------------------------------------------------------- INFO  tests.suites.clu.repl_monitor_test:replication.py:455 Replica Root: dc=example,dc=com INFO  
tests.suites.clu.repl_monitor_test:replication.py:456 Replica ID: 2 INFO  tests.suites.clu.repl_monitor_test:replication.py:457 Replica Status: Available INFO  tests.suites.clu.repl_monitor_test:replication.py:458 Max CSN: 5f9a0f1c000000020000 INFO  tests.suites.clu.repl_monitor_test:replication.py:461 Status For Agreement: "001" (ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001) Replica Enabled: on Update In Progress: FALSE Last Update Start: 20201029003852Z Last Update End: 20201029003852Z Number Of Changes Sent: 2:1/0 Number Of Changes Skipped: None Last Update Status: Error (0) Replica acquired successfully: Incremental update succeeded Last Init Start: 19700101000000Z Last Init End: 19700101000000Z Last Init Status: unavailable Reap Active: 0 Replication Status: In Synchronization Replication Lag Time: 00:00:00 INFO  tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Replica Root: dc=example,dc=com" is present INFO  tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Replica ID: 1" is present INFO  tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Replica Status: Available" is present INFO  tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Max CSN" is present INFO  tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Status For Agreement: "002" (ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002)" is present INFO  tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Replica Enabled: on" is present INFO  tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Update In Progress: FALSE" is present INFO  tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Last Update Start:" is present INFO  tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Last Update End:" is present INFO  tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Number Of Changes Sent:" is present 
INFO  tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Number Of Changes Skipped: None" is present INFO  tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Last Update Status: Error (0) Replica acquired successfully: Incremental update succeeded" is present INFO  tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Last Init Start:" is present INFO  tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Last Init End:" is present INFO  tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Last Init Status:" is present INFO  tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Reap Active: 0" is present INFO  tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Replication Status: In Synchronization" is present INFO  tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Replication Lag Time:" is present INFO  tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Supplier: " is present INFO  tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002" is present INFO  tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Replica Root: dc=example,dc=com" is present INFO  tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Replica ID: 2" is present INFO  tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Status For Agreement: "001" (ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001)" is present INFO  tests.suites.clu.repl_monitor_test:repl_monitor_test.py:52 Check for "Supplier: ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001" INFO  tests.suites.clu.repl_monitor_test:repl_monitor_test.py:60 Reset log file INFO  tests.suites.clu.repl_monitor_test:repl_monitor_test.py:174 Run replication monitor with aliases option DEBUG  tests.suites.clu.repl_monitor_test:dsrc.py:76 dsrc 
path: /root/.dsrc DEBUG  tests.suites.clu.repl_monitor_test:dsrc.py:77 dsrc container path: /data/config/container.inf DEBUG  tests.suites.clu.repl_monitor_test:dsrc.py:85 dsrc instances: [] DEBUG  tests.suites.clu.repl_monitor_test:dsrc.py:210 dsrc completed with {'connections': None, 'aliases': None} INFO  tests.suites.clu.repl_monitor_test:replication.py:438 Supplier: localhost.localdomain:39001 INFO  tests.suites.clu.repl_monitor_test:replication.py:443 ------------------------------------- INFO  tests.suites.clu.repl_monitor_test:replication.py:455 Replica Root: dc=example,dc=com INFO  tests.suites.clu.repl_monitor_test:replication.py:456 Replica ID: 1 INFO  tests.suites.clu.repl_monitor_test:replication.py:457 Replica Status: Available INFO  tests.suites.clu.repl_monitor_test:replication.py:458 Max CSN: 5f9a0f1b000000010000 INFO  tests.suites.clu.repl_monitor_test:replication.py:461 Status For Agreement: "002" (ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002) Replica Enabled: on Update In Progress: FALSE Last Update Start: 20201029003852Z Last Update End: 20201029003852Z Number Of Changes Sent: 1:2/0 Number Of Changes Skipped: None Last Update Status: Error (0) Replica acquired successfully: Incremental update succeeded Last Init Start: 19700101000000Z Last Init End: 19700101000000Z Last Init Status: unavailable Reap Active: 0 Replication Status: In Synchronization Replication Lag Time: 00:00:00 INFO  tests.suites.clu.repl_monitor_test:replication.py:438 Supplier: M2 (ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002) INFO  tests.suites.clu.repl_monitor_test:replication.py:443 ---------------------------------------------------------------------- INFO  tests.suites.clu.repl_monitor_test:replication.py:455 Replica Root: dc=example,dc=com INFO  tests.suites.clu.repl_monitor_test:replication.py:456 Replica ID: 2 INFO  tests.suites.clu.repl_monitor_test:replication.py:457 Replica Status: Available INFO  
tests.suites.clu.repl_monitor_test:replication.py:458 Max CSN: 5f9a0f1c000000020000 INFO  tests.suites.clu.repl_monitor_test:replication.py:461 Status For Agreement: "001" (ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001) Replica Enabled: on Update In Progress: FALSE Last Update Start: 20201029003852Z Last Update End: 20201029003852Z Number Of Changes Sent: 2:1/0 Number Of Changes Skipped: None Last Update Status: Error (0) Replica acquired successfully: Incremental update succeeded Last Init Start: 19700101000000Z Last Init End: 19700101000000Z Last Init Status: unavailable Reap Active: 0 Replication Status: In Synchronization Replication Lag Time: 00:00:00 INFO  tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Replica Root: dc=example,dc=com" is present INFO  tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Replica ID: 1" is present INFO  tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Replica Status: Available" is present INFO  tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Max CSN" is present INFO  tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Status For Agreement: "002" (ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002)" is present INFO  tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Replica Enabled: on" is present INFO  tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Update In Progress: FALSE" is present INFO  tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Last Update Start:" is present INFO  tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Last Update End:" is present INFO  tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Number Of Changes Sent:" is present INFO  tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Number Of Changes Skipped: None" is present INFO  
tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Last Update Status: Error (0) Replica acquired successfully: Incremental update succeeded" is present INFO  tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Last Init Start:" is present INFO  tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Last Init End:" is present INFO  tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Last Init Status:" is present INFO  tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Reap Active: 0" is present INFO  tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Replication Status: In Synchronization" is present INFO  tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Replication Lag Time:" is present INFO  tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Supplier: " is present INFO  tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002" is present INFO  tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Replica Root: dc=example,dc=com" is present INFO  tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Replica ID: 2" is present INFO  tests.suites.clu.repl_monitor_test:repl_monitor_test.py:48 Check that "Status For Agreement: "001" (ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001)" is present INFO  tests.suites.clu.repl_monitor_test:repl_monitor_test.py:52 Check for "['Supplier: M1 (ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001)', 'Supplier: M2 (ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002)']"
Failed suites/gssapi/simple_gssapi_test.py::test_gssapi_bind 0.26
topology_st_gssapi = <lib389.topologies.TopologyMain object at 0x7fd1647a97c0>
testuser = <lib389.idm.user.UserAccount object at 0x7fd17646d460>

@gssapi_ack
def test_gssapi_bind(topology_st_gssapi, testuser):
"""Test that we can bind with GSSAPI

:id: 894a4c27-3d4c-4ba3-aa33-2910032e3783

:setup: standalone gssapi instance

:steps:
1. Bind with sasl/gssapi
:expectedresults:
1. Bind succeeds

"""
> conn = testuser.bind_gssapi()

suites/gssapi/simple_gssapi_test.py:53:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/usr/local/lib/python3.8/site-packages/lib389/idm/account.py:258: in bind_gssapi
inst_clone.open(saslmethod='gssapi')
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:995: in open
self.sasl_interactive_bind_s("", sasl_auth, escapehatch='i am sure')
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:476: in sasl_interactive_bind_s
return self._ldap_call(self._l.sasl_interactive_bind_s,who,auth,RequestControlTuples(serverctrls),RequestControlTuples(clientctrls),sasl_flags)
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:340: in _ldap_call
reraise(exc_type, exc_value, exc_traceback)
/usr/local/lib64/python3.8/site-packages/ldap/compat.py:46: in reraise
raise exc_value
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd164784700>
func = <built-in method sasl_interactive_bind_s of LDAP object at 0x7fd1663869f0>
args = ('', <ldap.sasl.gssapi object at 0x7fd166386bb0>, None, None, 2)
kwargs = {}, diagnostic_message_success = None, exc_type = None
exc_value = None, exc_traceback = None

def _ldap_call(self,func,*args,**kwargs):
"""
Wrapper method mainly for serializing calls into OpenLDAP libs
and trace logs
"""
self._ldap_object_lock.acquire()
if __debug__:
if self._trace_level>=1:
self._trace_file.write('*** %s %s - %s\n%s\n' % (
repr(self),
self._uri,
'.'.join((self.__class__.__name__,func.__name__)),
pprint.pformat((args,kwargs))
))
if self._trace_level>=9:
traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file)
diagnostic_message_success = None
try:
try:
> result = func(*args,**kwargs)
E ldap.INVALID_CREDENTIALS: {'result': 49, 'desc': 'Invalid credentials', 'ctrls': [], 'info': 'SASL(-1): generic failure: GSSAPI Error: An invalid name was supplied (Included profile file could not be read)'}

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:324: INVALID_CREDENTIALS
-----------------------------Captured stdout setup------------------------------
Kerberos master password: yhdu8oBKWHVPib2qdcxtSrJWMikVB9D3aBpJNUGxnq33qy.jX60.IgwUDmvdGXTOj Loading random data Initializing database '/var/kerberos/krb5kdc/principal' for realm 'HOSTED.UPSHIFT.RDU2.REDHAT.COM', master key name 'K/M@HOSTED.UPSHIFT.RDU2.REDHAT.COM' Authenticating as principal root/admin@HOSTED.UPSHIFT.RDU2.REDHAT.COM with password. Principal "ldap/ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com@HOSTED.UPSHIFT.RDU2.REDHAT.COM" created. Authenticating as principal root/admin@HOSTED.UPSHIFT.RDU2.REDHAT.COM with password. K/M@HOSTED.UPSHIFT.RDU2.REDHAT.COM kadmin/admin@HOSTED.UPSHIFT.RDU2.REDHAT.COM kadmin/changepw@HOSTED.UPSHIFT.RDU2.REDHAT.COM kadmin/ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com@HOSTED.UPSHIFT.RDU2.REDHAT.COM kiprop/ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com@HOSTED.UPSHIFT.RDU2.REDHAT.COM krbtgt/HOSTED.UPSHIFT.RDU2.REDHAT.COM@HOSTED.UPSHIFT.RDU2.REDHAT.COM ldap/ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com@HOSTED.UPSHIFT.RDU2.REDHAT.COM Authenticating as principal root/admin@HOSTED.UPSHIFT.RDU2.REDHAT.COM with password. Entry for principal ldap/ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com@HOSTED.UPSHIFT.RDU2.REDHAT.COM with kvno 2, encryption type aes256-cts-hmac-sha1-96 added to keytab WRFILE:/etc/krb5.keytab. Entry for principal ldap/ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com@HOSTED.UPSHIFT.RDU2.REDHAT.COM with kvno 2, encryption type aes128-cts-hmac-sha1-96 added to keytab WRFILE:/etc/krb5.keytab. Authenticating as principal root/admin@HOSTED.UPSHIFT.RDU2.REDHAT.COM with password. Principal "testuser@HOSTED.UPSHIFT.RDU2.REDHAT.COM" created. Authenticating as principal root/admin@HOSTED.UPSHIFT.RDU2.REDHAT.COM with password. 
K/M@HOSTED.UPSHIFT.RDU2.REDHAT.COM kadmin/admin@HOSTED.UPSHIFT.RDU2.REDHAT.COM kadmin/changepw@HOSTED.UPSHIFT.RDU2.REDHAT.COM kadmin/ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com@HOSTED.UPSHIFT.RDU2.REDHAT.COM kiprop/ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com@HOSTED.UPSHIFT.RDU2.REDHAT.COM krbtgt/HOSTED.UPSHIFT.RDU2.REDHAT.COM@HOSTED.UPSHIFT.RDU2.REDHAT.COM ldap/ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com@HOSTED.UPSHIFT.RDU2.REDHAT.COM testuser@HOSTED.UPSHIFT.RDU2.REDHAT.COM Authenticating as principal root/admin@HOSTED.UPSHIFT.RDU2.REDHAT.COM with password. Entry for principal testuser@HOSTED.UPSHIFT.RDU2.REDHAT.COM with kvno 2, encryption type aes256-cts-hmac-sha1-96 added to keytab WRFILE:/tmp/testuser.keytab. Entry for principal testuser@HOSTED.UPSHIFT.RDU2.REDHAT.COM with kvno 2, encryption type aes128-cts-hmac-sha1-96 added to keytab WRFILE:/tmp/testuser.keytab.
-----------------------------Captured stderr setup------------------------------
No policy specified for ldap/ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com@HOSTED.UPSHIFT.RDU2.REDHAT.COM; defaulting to no policy No policy specified for testuser@HOSTED.UPSHIFT.RDU2.REDHAT.COM; defaulting to no policy
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Failed suites/gssapi/simple_gssapi_test.py::test_support_mech 0.32
topology_st_gssapi = <lib389.topologies.TopologyMain object at 0x7fd1647a97c0>
testuser = <lib389.idm.user.UserAccount object at 0x7fd17646d460>

@gssapi_ack
def test_support_mech(topology_st_gssapi, testuser):
"""Test allowed sasl mechs works when GSSAPI is allowed

:id: 6ec80aca-00c4-4141-b96b-3ae8837fc751

:setup: standalone gssapi instance

:steps:
1. Add GSSAPI to allowed sasl mechanisms.
2. Attempt to bind
:expectedresults:
1. The allowed mechs are changed.
2. The bind succeeds.
"""
topology_st_gssapi.standalone.config.set('nsslapd-allowed-sasl-mechanisms', 'GSSAPI EXTERNAL ANONYMOUS')
> conn = testuser.bind_gssapi()

suites/gssapi/simple_gssapi_test.py:125:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/usr/local/lib/python3.8/site-packages/lib389/idm/account.py:258: in bind_gssapi
inst_clone.open(saslmethod='gssapi')
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:995: in open
self.sasl_interactive_bind_s("", sasl_auth, escapehatch='i am sure')
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:476: in sasl_interactive_bind_s
return self._ldap_call(self._l.sasl_interactive_bind_s,who,auth,RequestControlTuples(serverctrls),RequestControlTuples(clientctrls),sasl_flags)
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:340: in _ldap_call
reraise(exc_type, exc_value, exc_traceback)
/usr/local/lib64/python3.8/site-packages/ldap/compat.py:46: in reraise
raise exc_value
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd1756b94f0>
func = <built-in method sasl_interactive_bind_s of LDAP object at 0x7fd165fbc9c0>
args = ('', <ldap.sasl.gssapi object at 0x7fd165fbc880>, None, None, 2)
kwargs = {}, diagnostic_message_success = None, exc_type = None
exc_value = None, exc_traceback = None

def _ldap_call(self,func,*args,**kwargs):
"""
Wrapper method mainly for serializing calls into OpenLDAP libs
and trace logs
"""
self._ldap_object_lock.acquire()
if __debug__:
if self._trace_level>=1:
self._trace_file.write('*** %s %s - %s\n%s\n' % (
repr(self),
self._uri,
'.'.join((self.__class__.__name__,func.__name__)),
pprint.pformat((args,kwargs))
))
if self._trace_level>=9:
traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file)
diagnostic_message_success = None
try:
try:
> result = func(*args,**kwargs)
E ldap.INVALID_CREDENTIALS: {'result': 49, 'desc': 'Invalid credentials', 'ctrls': [], 'info': 'SASL(-1): generic failure: GSSAPI Error: An invalid name was supplied (Included profile file could not be read)'}

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:324: INVALID_CREDENTIALS
Failed suites/healthcheck/health_security_test.py::test_healthcheck_certif_expiring_within_30d 11.03
topology_st = <lib389.topologies.TopologyMain object at 0x7fd1663ab8b0>

@pytest.mark.ds50873
@pytest.mark.bz1685160
@pytest.mark.xfail(ds_is_older("1.4.1"), reason="Not implemented")
def test_healthcheck_certif_expiring_within_30d(topology_st):
"""Check if HealthCheck returns DSCERTLE0001 code

:id: c2165032-88ba-4978-a4ca-2fecfd8c35d8
:setup: Standalone instance
:steps:
1. Create DS instance
2. Use libfaketime to tell the process the date is within 30 days before certificate expiration
3. Use HealthCheck without --json option
4. Use HealthCheck with --json option
:expectedresults:
1. Success
2. Success
3. Healthcheck reports DSCERTLE0001 code and related details
4. Healthcheck reports DSCERTLE0001 code and related details
"""

RET_CODE = 'DSCERTLE0001'

standalone = topology_st.standalone

standalone.enable_tls()

# Cert is valid two years from today, so we count the date that is within 30 days before certificate expiration
date_future = datetime.now() + timedelta(days=701)

with libfaketime.fake_time(date_future):
time.sleep(1)
> run_healthcheck_and_flush_log(topology_st, standalone, RET_CODE, json=False)

suites/healthcheck/health_security_test.py:304:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

topology = <lib389.topologies.TopologyMain object at 0x7fd1663ab8b0>
instance = <lib389.DirSrv object at 0x7fd1756e67c0>
searched_code = 'DSCERTLE0001', json = False, searched_code2 = None

def run_healthcheck_and_flush_log(topology, instance, searched_code, json, searched_code2=None):
args = FakeArgs()
args.instance = instance.serverid
args.verbose = instance.verbose
args.list_errors = False
args.list_checks = False
args.check = ['config', 'encryption', 'tls', 'fschecks']
args.dry_run = False

if json:
log.info('Use healthcheck with --json option')
args.json = json
health_check_run(instance, topology.logcap.log, args)
assert topology.logcap.contains(searched_code)
log.info('Healthcheck returned searched code: %s' % searched_code)

if searched_code2 is not None:
assert topology.logcap.contains(searched_code2)
log.info('Healthcheck returned searched code: %s' % searched_code2)
else:
log.info('Use healthcheck without --json option')
args.json = json
health_check_run(instance, topology.logcap.log, args)
> assert topology.logcap.contains(searched_code)
E AssertionError: assert False
E + where False = <bound method LogCapture.contains of <LogCapture (NOTSET)>>('DSCERTLE0001')
E + where <bound method LogCapture.contains of <LogCapture (NOTSET)>> = <LogCapture (NOTSET)>.contains
E + where <LogCapture (NOTSET)> = <lib389.topologies.TopologyMain object at 0x7fd1663ab8b0>.logcap

suites/healthcheck/health_security_test.py:67: AssertionError
-------------------------------Captured log call--------------------------------
INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking config:hr_timestamp ... INFO  LogCapture:health.py:99 Checking config:passwordscheme ... INFO  LogCapture:health.py:99 Checking encryption:check_tls_version ... INFO  LogCapture:health.py:99 Checking tls:certificate_expiration ... INFO  LogCapture:health.py:99 Checking fschecks:file_perms ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:111 No issues found.
Failed suites/healthcheck/health_security_test.py::test_healthcheck_certif_expired 10.44
topology_st = <lib389.topologies.TopologyMain object at 0x7fd1663ab8b0>

@pytest.mark.ds50873
@pytest.mark.bz1685160
@pytest.mark.xfail(ds_is_older("1.4.1"), reason="Not implemented")
def test_healthcheck_certif_expired(topology_st):
"""Check if HealthCheck returns DSCERTLE0002 code

:id: ceff2c22-62c0-4fd9-b737-930a88458d68
:setup: Standalone instance
:steps:
1. Create DS instance
2. Use libfaketime to tell the process the date is after certificate expiration
3. Use HealthCheck without --json option
4. Use HealthCheck with --json option
:expectedresults:
1. Success
2. Success
3. Healthcheck reports DSCERTLE0002 code and related details
4. Healthcheck reports DSCERTLE0002 code and related details
"""

RET_CODE = 'DSCERTLE0002'

standalone = topology_st.standalone

standalone.enable_tls()

# Cert is valid two years from today, so we count the date that is after expiration
date_future = datetime.now() + timedelta(days=731)

with libfaketime.fake_time(date_future):
time.sleep(1)
> run_healthcheck_and_flush_log(topology_st, standalone, RET_CODE, json=False)

suites/healthcheck/health_security_test.py:343:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

topology = <lib389.topologies.TopologyMain object at 0x7fd1663ab8b0>
instance = <lib389.DirSrv object at 0x7fd1756e67c0>
searched_code = 'DSCERTLE0002', json = False, searched_code2 = None

def run_healthcheck_and_flush_log(topology, instance, searched_code, json, searched_code2=None):
args = FakeArgs()
args.instance = instance.serverid
args.verbose = instance.verbose
args.list_errors = False
args.list_checks = False
args.check = ['config', 'encryption', 'tls', 'fschecks']
args.dry_run = False

if json:
log.info('Use healthcheck with --json option')
args.json = json
health_check_run(instance, topology.logcap.log, args)
assert topology.logcap.contains(searched_code)
log.info('Healthcheck returned searched code: %s' % searched_code)

if searched_code2 is not None:
assert topology.logcap.contains(searched_code2)
log.info('Healthcheck returned searched code: %s' % searched_code2)
else:
log.info('Use healthcheck without --json option')
args.json = json
health_check_run(instance, topology.logcap.log, args)
> assert topology.logcap.contains(searched_code)
E AssertionError: assert False
E + where False = <bound method LogCapture.contains of <LogCapture (NOTSET)>>('DSCERTLE0002')
E + where <bound method LogCapture.contains of <LogCapture (NOTSET)>> = <LogCapture (NOTSET)>.contains
E + where <LogCapture (NOTSET)> = <lib389.topologies.TopologyMain object at 0x7fd1663ab8b0>.logcap

suites/healthcheck/health_security_test.py:67: AssertionError
-------------------------------Captured log call--------------------------------
INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking config:hr_timestamp ... INFO  LogCapture:health.py:99 Checking config:passwordscheme ... INFO  LogCapture:health.py:99 Checking encryption:check_tls_version ... INFO  LogCapture:health.py:99 Checking tls:certificate_expiration ... INFO  LogCapture:health.py:99 Checking fschecks:file_perms ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:119 2 Issues found! Generating report ... INFO  LogCapture:health.py:45 [1] DS Lint Error: DSCERTLE0001 INFO  LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO  LogCapture:health.py:47 Severity: MEDIUM INFO  LogCapture:health.py:49 Check: tls:certificate_expiration INFO  LogCapture:health.py:50 Affects: INFO  LogCapture:health.py:52 -- Expiring Certificate INFO  LogCapture:health.py:53 Details: INFO  LogCapture:health.py:54 ----------- INFO  LogCapture:health.py:55 The certificate (Self-Signed-CA) will expire in less than 30 days INFO  LogCapture:health.py:56 Resolution: INFO  LogCapture:health.py:57 ----------- INFO  LogCapture:health.py:58 Renew the certificate before it expires to prevent disruptions with TLS connections. 
INFO  LogCapture:health.py:45 [2] DS Lint Error: DSCERTLE0001 INFO  LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO  LogCapture:health.py:47 Severity: MEDIUM INFO  LogCapture:health.py:49 Check: tls:certificate_expiration INFO  LogCapture:health.py:50 Affects: INFO  LogCapture:health.py:52 -- Expiring Certificate INFO  LogCapture:health.py:53 Details: INFO  LogCapture:health.py:54 ----------- INFO  LogCapture:health.py:55 The certificate (Server-Cert) will expire in less than 30 days INFO  LogCapture:health.py:56 Resolution: INFO  LogCapture:health.py:57 ----------- INFO  LogCapture:health.py:58 Renew the certificate before it expires to prevent disruptions with TLS connections. INFO  LogCapture:health.py:124 ===== End Of Report (2 Issues found) =====
Failed suites/import/import_test.py::test_fast_slow_import 10.88
topo = <lib389.topologies.TopologyMain object at 0x7fd164853f70>
_toggle_private_import_mem = None, _import_clean = None

def test_fast_slow_import(topo, _toggle_private_import_mem, _import_clean):
"""With nsslapd-db-private-import-mem: on is faster import.

:id: 3044331c-9c0e-11ea-ac9f-8c16451d917b
:setup: Standalone Instance
:steps:
1. Let's set nsslapd-db-private-import-mem:on, nsslapd-import-cache-autosize: 0
2. Measure offline import time duration total_time1
3. Now nsslapd-db-private-import-mem:off
4. Measure offline import time duration total_time2
5. total_time1 < total_time2
6. Set nsslapd-db-private-import-mem:on, nsslapd-import-cache-autosize: -1
7. Measure offline import time duration total_time1
8. Now nsslapd-db-private-import-mem:off
9. Measure offline import time duration total_time2
10. total_time1 < total_time2
:expected results:
1. Operation successful
2. Operation successful
3. Operation successful
4. Operation successful
5. Operation successful
6. Operation successful
7. Operation successful
8. Operation successful
9. Operation successful
10. Operation successful
"""
# Let's set nsslapd-db-private-import-mem:on, nsslapd-import-cache-autosize: 0
config = LDBMConfig(topo.standalone)
# Measure offline import time duration total_time1
total_time1 = _import_offline(topo, 20)
# Now nsslapd-db-private-import-mem:off
config.replace('nsslapd-db-private-import-mem', 'off')
accounts = Accounts(topo.standalone, DEFAULT_SUFFIX)
for i in accounts.filter('(uid=*)'):
UserAccount(topo.standalone, i.dn).delete()
# Measure offline import time duration total_time2
total_time2 = _import_offline(topo, 20)
# total_time1 < total_time2
> assert total_time1 < total_time2
E assert 2.2280073165893555 < 2.0450422763824463

suites/import/import_test.py:307: AssertionError
Failed suites/paged_results/paged_results_test.py::test_search_paged_limits[conf_attr_values1-PASS] 5.23
topology_st = <lib389.topologies.TopologyMain object at 0x7fd175f8c220>
create_user = <lib389.idm.user.UserAccount object at 0x7fd1764f8430>
conf_attr_values = ('5000', '120', '122'), expected_rs = 'PASS'

@pytest.mark.parametrize('conf_attr_values,expected_rs',
((('5000', '100', '100'), ldap.ADMINLIMIT_EXCEEDED),
(('5000', '120', '122'), 'PASS')))
def test_search_paged_limits(topology_st, create_user, conf_attr_values, expected_rs):
"""Verify that nsslapd-idlistscanlimit and
nsslapd-lookthroughlimit can limit the administrator
search abilities.

:id: e0f8b916-7276-4bd3-9e73-8696a4468811
:parametrized: yes
:setup: Standalone instance, test user for binding,
10 users for the search base
:steps:
1. Set nsslapd-sizelimit and nsslapd-pagedsizelimit to 5000
2. Set nsslapd-idlistscanlimit: 120
3. Set nsslapd-lookthroughlimit: 122
4. Bind as test user
5. Search through added users with a simple paged control
using page_size = 10
6. Bind as Directory Manager
7. Set nsslapd-idlistscanlimit: 100
8. Set nsslapd-lookthroughlimit: 100
9. Bind as test user
10. Search through added users with a simple paged control
using page_size = 10
:expectedresults:
1. nsslapd-sizelimit and nsslapd-pagedsizelimit
should be successfully set
2. nsslapd-idlistscanlimit should be successfully set
3. nsslapd-lookthroughlimit should be successfully set
4. Bind should be successful
5. No error happens, all users should be found
6. Bind should be successful
7. nsslapd-idlistscanlimit should be successfully set
8. nsslapd-lookthroughlimit should be successfully set
9. Bind should be successful
10. It should throw ADMINLIMIT_EXCEEDED exception
"""

users_num = 101
page_size = 10
users_list = add_users(topology_st, users_num, DEFAULT_SUFFIX)
search_flt = r'(uid=test*)'
searchreq_attrlist = ['dn', 'sn']
size_attr_bck = change_conf_attr(topology_st, DN_CONFIG, 'nsslapd-sizelimit', conf_attr_values[0])
pagedsize_attr_bck = change_conf_attr(topology_st, DN_CONFIG, 'nsslapd-pagedsizelimit', conf_attr_values[0])
idlistscan_attr_bck = change_conf_attr(topology_st, 'cn=config,%s' % DN_LDBM, 'nsslapd-idlistscanlimit', conf_attr_values[1])
lookthrough_attr_bck = change_conf_attr(topology_st, 'cn=config,%s' % DN_LDBM, 'nsslapd-lookthroughlimit', conf_attr_values[2])

try:
log.info('Set user bind')
conn = create_user.bind(TEST_USER_PWD)

req_ctrl = SimplePagedResultsControl(True, size=page_size, cookie='')
controls = [req_ctrl]

if expected_rs == ldap.ADMINLIMIT_EXCEEDED:
log.info('Expect to fail with ADMINLIMIT_EXCEEDED')
with pytest.raises(expected_rs):
all_results = paged_search(conn, DEFAULT_SUFFIX, controls, search_flt, searchreq_attrlist)
elif expected_rs == 'PASS':
log.info('Expect to pass')
> all_results = paged_search(conn, DEFAULT_SUFFIX, controls, search_flt, searchreq_attrlist)

suites/paged_results/paged_results_test.py:901:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
suites/paged_results/paged_results_test.py:200: in paged_search
rtype, rdata, rmsgid, rctrls = conn.result3(msgid)
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:764: in result3
resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4(
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:774: in result4
ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop)
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:340: in _ldap_call
reraise(exc_type, exc_value, exc_traceback)
/usr/local/lib64/python3.8/site-packages/ldap/compat.py:46: in reraise
raise exc_value
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd176196070>
func = <built-in method result4 of LDAP object at 0x7fd16483ab70>
args = (12, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None
exc_type = None, exc_value = None, exc_traceback = None

def _ldap_call(self,func,*args,**kwargs):
"""
Wrapper method mainly for serializing calls into OpenLDAP libs
and trace logs
"""
self._ldap_object_lock.acquire()
if __debug__:
if self._trace_level>=1:
self._trace_file.write('*** %s %s - %s\n%s\n' % (
repr(self),
self._uri,
'.'.join((self.__class__.__name__,func.__name__)),
pprint.pformat((args,kwargs))
))
if self._trace_level>=9:
traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file)
diagnostic_message_success = None
try:
try:
> result = func(*args,**kwargs)
E ldap.ADMINLIMIT_EXCEEDED: {'msgtype': 100, 'msgid': 12, 'result': 11, 'desc': 'Administrative limit exceeded', 'ctrls': []}

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:324: ADMINLIMIT_EXCEEDED
-------------------------------Captured log call--------------------------------
INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 101 users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-sizelimit to 5000. Previous value - b'2000'. Modified suffix - cn=config. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-pagedsizelimit to 5000. Previous value - b'0'. Modified suffix - cn=config. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-idlistscanlimit to 120. Previous value - b'4000'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-lookthroughlimit to 122. Previous value - b'5000'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:889 Set user bind INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:900 Expect to pass INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 10; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7fd16480e340>]. 
INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 1 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 2 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 3 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 4 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 5 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 6 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 7 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 8 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 9 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 101 users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-sizelimit to b'2000'. Previous value - b'5000'. Modified suffix - cn=config. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-pagedsizelimit to b'0'. Previous value - b'5000'. Modified suffix - cn=config. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-lookthroughlimit to b'5000'. Previous value - b'122'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-idlistscanlimit to b'4000'. Previous value - b'120'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config.
Failed suites/paged_results/paged_results_test.py::test_search_paged_user_limits[conf_attr_values1-PASS] 4.32
topology_st = <lib389.topologies.TopologyMain object at 0x7fd175f8c220>
create_user = <lib389.idm.user.UserAccount object at 0x7fd1764f8430>
conf_attr_values = ('1000', '120', '122'), expected_rs = 'PASS'

@pytest.mark.parametrize('conf_attr_values,expected_rs',
((('1000', '100', '100'), ldap.ADMINLIMIT_EXCEEDED),
(('1000', '120', '122'), 'PASS')))
def test_search_paged_user_limits(topology_st, create_user, conf_attr_values, expected_rs):
"""Verify that nsPagedIDListScanLimit and nsPagedLookthroughLimit
override nsslapd-idlistscanlimit and nsslapd-lookthroughlimit
while performing search with the simple paged results control.

:id: 69e393e9-1ab8-4f4e-b4a1-06ca63dc7b1b
:parametrized: yes
:setup: Standalone instance, test user for binding,
10 users for the search base
:steps:
1. Set nsslapd-idlistscanlimit: 1000
2. Set nsslapd-lookthroughlimit: 1000
3. Set nsPagedIDListScanLimit: 120
4. Set nsPagedLookthroughLimit: 122
5. Bind as test user
6. Search through added users with a simple paged control
using page_size = 10
7. Bind as Directory Manager
8. Set nsPagedIDListScanLimit: 100
9. Set nsPagedLookthroughLimit: 100
10. Bind as test user
11. Search through added users with a simple paged control
using page_size = 10
:expectedresults:
1. nsslapd-idlistscanlimit should be successfully set
2. nsslapd-lookthroughlimit should be successfully set
3. nsPagedIDListScanLimit should be successfully set
4. nsPagedLookthroughLimit should be successfully set
5. Bind should be successful
6. No error happens, all users should be found
7. Bind should be successful
8. nsPagedIDListScanLimit should be successfully set
9. nsPagedLookthroughLimit should be successfully set
10. Bind should be successful
11. It should throw ADMINLIMIT_EXCEEDED exception
"""

users_num = 101
page_size = 10
users_list = add_users(topology_st, users_num, DEFAULT_SUFFIX)
search_flt = r'(uid=test*)'
searchreq_attrlist = ['dn', 'sn']
lookthrough_attr_bck = change_conf_attr(topology_st, 'cn=config,%s' % DN_LDBM, 'nsslapd-lookthroughlimit', conf_attr_values[0])
idlistscan_attr_bck = change_conf_attr(topology_st, 'cn=config,%s' % DN_LDBM, 'nsslapd-idlistscanlimit', conf_attr_values[0])
user_idlistscan_attr_bck = change_conf_attr(topology_st, create_user.dn, 'nsPagedIDListScanLimit', conf_attr_values[1])
user_lookthrough_attr_bck = change_conf_attr(topology_st, create_user.dn, 'nsPagedLookthroughLimit', conf_attr_values[2])

try:
log.info('Set user bind')
conn = create_user.bind(TEST_USER_PWD)

req_ctrl = SimplePagedResultsControl(True, size=page_size, cookie='')
controls = [req_ctrl]

if expected_rs == ldap.ADMINLIMIT_EXCEEDED:
log.info('Expect to fail with ADMINLIMIT_EXCEEDED')
with pytest.raises(expected_rs):
all_results = paged_search(conn, DEFAULT_SUFFIX, controls, search_flt, searchreq_attrlist)
elif expected_rs == 'PASS':
log.info('Expect to pass')
> all_results = paged_search(conn, DEFAULT_SUFFIX, controls, search_flt, searchreq_attrlist)

suites/paged_results/paged_results_test.py:975:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
suites/paged_results/paged_results_test.py:200: in paged_search
rtype, rdata, rmsgid, rctrls = conn.result3(msgid)
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:764: in result3
resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4(
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:774: in result4
ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop)
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:340: in _ldap_call
reraise(exc_type, exc_value, exc_traceback)
/usr/local/lib64/python3.8/site-packages/ldap/compat.py:46: in reraise
raise exc_value
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd1647949d0>
func = <built-in method result4 of LDAP object at 0x7fd1763c31b0>
args = (12, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None
exc_type = None, exc_value = None, exc_traceback = None

def _ldap_call(self,func,*args,**kwargs):
"""
Wrapper method mainly for serializing calls into OpenLDAP libs
and trace logs
"""
self._ldap_object_lock.acquire()
if __debug__:
if self._trace_level>=1:
self._trace_file.write('*** %s %s - %s\n%s\n' % (
repr(self),
self._uri,
'.'.join((self.__class__.__name__,func.__name__)),
pprint.pformat((args,kwargs))
))
if self._trace_level>=9:
traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file)
diagnostic_message_success = None
try:
try:
> result = func(*args,**kwargs)
E ldap.ADMINLIMIT_EXCEEDED: {'msgtype': 100, 'msgid': 12, 'result': 11, 'desc': 'Administrative limit exceeded', 'ctrls': []}

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:324: ADMINLIMIT_EXCEEDED
-------------------------------Captured log call--------------------------------
INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 101 users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-lookthroughlimit to 1000. Previous value - b'5000'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-idlistscanlimit to 1000. Previous value - b'4000'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsPagedIDListScanLimit to 120. Previous value - None. Modified suffix - uid=simplepaged_test,ou=People,dc=example,dc=com. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsPagedLookthroughLimit to 122. Previous value - None. Modified suffix - uid=simplepaged_test,ou=People,dc=example,dc=com. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:963 Set user bind INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:974 Expect to pass INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 10; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7fd1647b2160>]. 
INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 1 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 2 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 3 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 4 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 5 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 6 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 7 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 8 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 9 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 101 users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-lookthroughlimit to b'5000'. Previous value - b'1000'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-idlistscanlimit to b'4000'. Previous value - b'1000'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsPagedIDListScanLimit to None. Previous value - b'120'. Modified suffix - uid=simplepaged_test,ou=People,dc=example,dc=com. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsPagedLookthroughLimit to None. Previous value - b'122'. Modified suffix - uid=simplepaged_test,ou=People,dc=example,dc=com.
Failed suites/replication/conflict_resolve_test.py::TestTwoMasters::test_complex_add_modify_modrdn_delete 84.89
self = <tests.suites.replication.conflict_resolve_test.TestTwoMasters object at 0x7fd17ac12370>
topology_m2 = <lib389.topologies.TopologyMain object at 0x7fd17a953460>
base_m2 = <lib389.idm.nscontainer.nsContainer object at 0x7fd1762ed460>

def test_complex_add_modify_modrdn_delete(self, topology_m2, base_m2):
"""Check that conflict properly resolved for complex operations
which involve add, modify, modrdn and delete

:id: 77f09b18-03d1-45da-940b-1ad2c2908eb1
:setup: Two master replication, test container for entries, enable plugin logging,
audit log, error log for replica and access log for internal
:steps:
1. Add ten users to m1 and wait for replication to happen
2. Pause replication
3. Test add-del on m1 and add on m2
4. Test add-mod on m1 and add on m2
5. Test add-modrdn on m1 and add on m2
6. Test multiple add, modrdn
7. Test Add-del on both masters
8. Test modrdn-modrdn
9. Test modrdn-del
10. Resume replication
11. Check that the entries on both masters are the same and replication is working
:expectedresults:
1. It should pass
2. It should pass
3. It should pass
4. It should pass
5. It should pass
6. It should pass
7. It should pass
8. It should pass
9. It should pass
10. It should pass
11. It should pass
"""

M1 = topology_m2.ms["master1"]
M2 = topology_m2.ms["master2"]

test_users_m1 = UserAccounts(M1, base_m2.dn, rdn=None)
test_users_m2 = UserAccounts(M2, base_m2.dn, rdn=None)
repl = ReplicationManager(SUFFIX)

for user_num in range(1100, 1110):
_create_user(test_users_m1, user_num)

repl.test_replication(M1, M2)
topology_m2.pause_all_replicas()

log.info("Test add-del on M1 and add on M2")
user_num += 1
_create_user(test_users_m1, user_num)
_delete_user(test_users_m1, user_num, sleep=True)
_create_user(test_users_m2, user_num, sleep=True)

user_num += 1
_create_user(test_users_m1, user_num, sleep=True)
_create_user(test_users_m2, user_num, sleep=True)
_delete_user(test_users_m1, user_num, sleep=True)

user_num += 1
_create_user(test_users_m2, user_num, sleep=True)
_create_user(test_users_m1, user_num)
_delete_user(test_users_m1, user_num)

log.info("Test add-mod on M1 and add on M2")
user_num += 1
_create_user(test_users_m1, user_num)
_modify_user(test_users_m1, user_num, sleep=True)
_create_user(test_users_m2, user_num, sleep=True)

user_num += 1
_create_user(test_users_m1, user_num, sleep=True)
_create_user(test_users_m2, user_num, sleep=True)
_modify_user(test_users_m1, user_num, sleep=True)

user_num += 1
_create_user(test_users_m2, user_num, sleep=True)
_create_user(test_users_m1, user_num)
_modify_user(test_users_m1, user_num)

log.info("Test add-modrdn on M1 and add on M2")
user_num += 1
_create_user(test_users_m1, user_num)
_rename_user(test_users_m1, user_num, user_num+20, sleep=True)
_create_user(test_users_m2, user_num, sleep=True)

user_num += 1
_create_user(test_users_m1, user_num, sleep=True)
_create_user(test_users_m2, user_num, sleep=True)
_rename_user(test_users_m1, user_num, user_num+20, sleep=True)

user_num += 1
_create_user(test_users_m2, user_num, sleep=True)
_create_user(test_users_m1, user_num)
_rename_user(test_users_m1, user_num, user_num+20)

log.info("Test multiple add, modrdn")
user_num += 1
_create_user(test_users_m1, user_num, sleep=True)
_create_user(test_users_m2, user_num, sleep=True)
_rename_user(test_users_m1, user_num, user_num+20)
_create_user(test_users_m1, user_num, sleep=True)
_modify_user(test_users_m2, user_num, sleep=True)

log.info("Add - del on both masters")
user_num += 1
_create_user(test_users_m1, user_num)
_delete_user(test_users_m1, user_num, sleep=True)
_create_user(test_users_m2, user_num)
_delete_user(test_users_m2, user_num, sleep=True)

log.info("Test modrdn - modrdn")
user_num += 1
_rename_user(test_users_m1, 1109, 1129, sleep=True)
_rename_user(test_users_m2, 1109, 1129, sleep=True)

log.info("Test modrdn - del")
user_num += 1
_rename_user(test_users_m1, 1100, 1120, sleep=True)
_delete_user(test_users_m2, 1100)

user_num += 1
_delete_user(test_users_m2, 1101, sleep=True)
_rename_user(test_users_m1, 1101, 1121)

topology_m2.resume_all_replicas()

repl.test_replication_topology(topology_m2)
time.sleep(30)

user_dns_m1 = [user.dn for user in test_users_m1.list()]
user_dns_m2 = [user.dn for user in test_users_m2.list()]
> assert set(user_dns_m1) == set(user_dns_m2)
E AssertionError: assert {'uid=test_us...,dc=com', ...} == {'uid=test_us...,dc=com', ...}
E Extra items in the left set:
E 'uid=test_user_1112,cn=test_container,dc=example,dc=com'
E 'uid=test_user_1111,cn=test_container,dc=example,dc=com'
E 'uid=test_user_1117,cn=test_container,dc=example,dc=com'
E Full diff:
E {
E 'uid=test_user_1102,cn=test_container,dc=example,dc=com',...
E
E ...Full output truncated (24 lines hidden), use '-vv' to show

suites/replication/conflict_resolve_test.py:369: AssertionError
-------------------------------Captured log call--------------------------------
INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect d98cc056-ea9d-4cbf-8b7d-d8cbc4e8a733 / got description=3d1bde72-2f0d-42c6-ba82-880f124685c6) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect d98cc056-ea9d-4cbf-8b7d-d8cbc4e8a733 / got description=3d1bde72-2f0d-42c6-ba82-880f124685c6) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect d98cc056-ea9d-4cbf-8b7d-d8cbc4e8a733 / got description=3d1bde72-2f0d-42c6-ba82-880f124685c6) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect d98cc056-ea9d-4cbf-8b7d-d8cbc4e8a733 / got description=3d1bde72-2f0d-42c6-ba82-880f124685c6) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect d98cc056-ea9d-4cbf-8b7d-d8cbc4e8a733 / got description=3d1bde72-2f0d-42c6-ba82-880f124685c6) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:285 Test add-del on M1 and add on M2 INFO  tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:301 Test add-mod on M1 and add on M2 INFO  
tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:317 Test add-modrdn on M1 and add on M2 INFO  tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:333 Test multiple add, modrdn INFO  tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:341 Add - del on both masters INFO  tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:348 Test modrdn - modrdn INFO  tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:353 Test modrdn - del INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect eccacb9b-ef81-4f4f-973e-90cf6ce39684 / got description=d98cc056-ea9d-4cbf-8b7d-d8cbc4e8a733) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect eccacb9b-ef81-4f4f-973e-90cf6ce39684 / got description=d98cc056-ea9d-4cbf-8b7d-d8cbc4e8a733) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect eccacb9b-ef81-4f4f-973e-90cf6ce39684 / got description=d98cc056-ea9d-4cbf-8b7d-d8cbc4e8a733) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect eccacb9b-ef81-4f4f-973e-90cf6ce39684 / got description=d98cc056-ea9d-4cbf-8b7d-d8cbc4e8a733) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 
eccacb9b-ef81-4f4f-973e-90cf6ce39684 / got description=d98cc056-ea9d-4cbf-8b7d-d8cbc4e8a733) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect c9dfbd85-b780-4c9c-b520-9bc6a02b7d27 / got description=eccacb9b-ef81-4f4f-973e-90cf6ce39684) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working
Failed suites/schema/schema_reload_test.py::test_schema_operation 0.45
topo = <lib389.topologies.TopologyMain object at 0x7fd175378f70>

def test_schema_operation(topo):
"""Test that the cases in original schema are preserved.
Test that duplicated schema except cases are not loaded
Test to use a custom schema

:id: e7448863-ac62-4b49-b013-4efa412c0455
:setup: Standalone instance
:steps:
1. Create a test schema with cases
2. Run a schema_reload task
3. Check the attribute is present
4. Case 2: Check duplicated schema except cases are not loaded
5. Case 2-1: Use the custom schema

:expectedresults:
1. Operation should be successful
2. Operation should be successful
3. Operation should be successful
4. Operation should be successful
5. Operation should be successful
"""

log.info('case 1: Test the cases in the original schema are preserved.')

schema_filename = topo.standalone.schemadir + '/98test.ldif'
try:
with open(schema_filename, "w") as schema_file:
schema_file.write("dn: cn=schema\n")
schema_file.write("attributetypes: ( 8.9.10.11.12.13.14 NAME " +
"'MoZiLLaaTTRiBuTe' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 " +
" X-ORIGIN 'Mozilla Dummy Schema' )\n")
schema_file.write("objectclasses: ( 1.2.3.4.5.6.7 NAME 'MozillaObject' " +
"SUP top MUST ( objectclass $ cn ) MAY ( MoZiLLaaTTRiBuTe )" +
" X-ORIGIN 'user defined' )')\n")

except OSError as e:
log.fatal("Failed to create schema file: " +
"{} Error: {}".format(schema_filename, str(e)))


# run the schema reload task with the default schemadir
schema = Schema(topo.standalone)
task = schema.reload(schema_dir=topo.standalone.schemadir)
task.wait()

subschema = topo.standalone.schema.get_subschema()
at_obj = subschema.get_obj(ldap.schema.AttributeType, 'MoZiLLaaTTRiBuTe')

> assert at_obj is not None, "The attribute was not found on server"
E AssertionError: The attribute was not found on server
E assert None is not None

suites/schema/schema_reload_test.py:120: AssertionError
-------------------------------Captured log call--------------------------------
INFO  tests.suites.schema.schema_reload_test:schema_reload_test.py:94 case 1: Test the cases in the original schema are preserved.
Failed suites/schema/schema_reload_test.py::test_valid_schema 2.02
topo = <lib389.topologies.TopologyMain object at 0x7fd175378f70>

def test_valid_schema(topo):
"""Test schema-reload task with valid schema

:id: 2ab304c0-3e58-4d34-b23b-a14b5997c7a8
:setup: Standalone instance
:steps:
1. Create schema file with valid schema
2. Run schema-reload.pl script
3. Run ldapsearch and check if schema was added
:expectedresults:
1. File creation should work
2. The schema reload task should be successful
3. Searching the server should return the new schema
"""

log.info("Test schema-reload task with valid schema")

# Step 1 - Create schema file
log.info("Create valid schema file (99user.ldif)...")
schema_filename = (topo.standalone.schemadir + "/99user.ldif")
try:
with open(schema_filename, 'w') as schema_file:
schema_file.write("dn: cn=schema\n")
schema_file.write("attributetypes: ( 8.9.10.11.12.13.13 NAME " +
"'ValidAttribute' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15" +
" X-ORIGIN 'Mozilla Dummy Schema' )\n")
schema_file.write("objectclasses: ( 1.2.3.4.5.6.7.8 NAME 'TestObject' " +
"SUP top MUST ( objectclass $ cn ) MAY ( givenName $ " +
"sn $ ValidAttribute ) X-ORIGIN 'user defined' )')\n")
except OSError as e:
log.fatal("Failed to create schema file: " +
"{} Error: {}".format(schema_filename, str(e)))

# Step 2 - Run the schema-reload task
log.info("Run the schema-reload task...")
schema = Schema(topo.standalone)
task = schema.reload(schema_dir=topo.standalone.schemadir)
task.wait()
> assert task.get_exit_code() == 0, "The schema reload task failed"
E AssertionError: The schema reload task failed
E assert 65 == 0
E +65
E -0

suites/schema/schema_reload_test.py:207: AssertionError
-------------------------------Captured log call--------------------------------
INFO  tests.suites.schema.schema_reload_test:schema_reload_test.py:184 Test schema-reload task with valid schema INFO  tests.suites.schema.schema_reload_test:schema_reload_test.py:187 Create valid schema file (99user.ldif)... INFO  tests.suites.schema.schema_reload_test:schema_reload_test.py:203 Run the schema-reload task...
Failed suites/syncrepl_plugin/basic_test.py::test_sync_repl_cookie 0.00
topology = <lib389.topologies.TopologyMain object at 0x7fd16763ed00>
request = <FixtureRequest for <Function test_sync_repl_cookie>>

def test_sync_repl_cookie(topology, request):
"""Test sync_repl cookie are progressing is an increasing order
when there are nested updates

:id: d7fbde25-5702-46ac-b38e-169d7a68e97c
:setup: Standalone Instance
:steps:
1.: enable retroCL
2.: configure retroCL to log nsuniqueid as targetUniqueId
3.: enable content_sync plugin
4.: enable automember
5.: create (2) groups. Few groups can help to reproduce the concurrent updates problem.
6.: configure automember to provision those groups with 'member'
7.: enable and configure memberof plugin
8.: enable plugin log level
9.: restart the server
10.: create a thread dedicated to run a sync repl client
11.: Create (9) users that will generate nested updates (automember/memberof)
12.: stop sync repl client and collect the list of cookie.change_no
13.: check that cookies.change_no are in increasing order
:expectedresults:
1.: succeeds
2.: succeeds
3.: succeeds
4.: succeeds
5.: succeeds
6.: succeeds
7.: succeeds
8.: succeeds
9.: succeeds
10.: succeeds
11.: succeeds
12.: succeeds
13.: succeeds
"""
inst = topology[0]

# Enable/configure retroCL
plugin = RetroChangelogPlugin(inst)
> plugin.disable()

suites/syncrepl_plugin/basic_test.py:275:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/usr/local/lib/python3.8/site-packages/lib389/plugins.py:63: in disable
self.set('nsslapd-pluginEnabled', 'off')
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.plugins.RetroChangelogPlugin object at 0x7fd16763cc40>
key = 'nsslapd-pluginEnabled', value = 'off', action = 2

def set(self, key, value, action=ldap.MOD_REPLACE):
"""Perform a specified action on a key with value

:param key: an attribute name
:type key: str
:param value: an attribute value
:type value: str
:param action: - ldap.MOD_REPLACE - by default
- ldap.MOD_ADD
- ldap.MOD_DELETE
:type action: int

:returns: result of modify_s operation
:raises: ValueError - if instance is not online
"""

if action == ldap.MOD_ADD:
action_txt = "ADD"
elif action == ldap.MOD_REPLACE:
action_txt = "REPLACE"
elif action == ldap.MOD_DELETE:
action_txt = "DELETE"
else:
# This should never happen (bug!)
action_txt = "UNKNOWN"

if value is None or len(value) < 512:
self._log.debug("%s set %s: (%r, %r)" % (self._dn, action_txt, key, display_log_value(key, value)))
else:
self._log.debug("%s set %s: (%r, value too large)" % (self._dn, action_txt, key))
if self._instance.state != DIRSRV_STATE_ONLINE:
> raise ValueError("Invalid state. Cannot set properties on instance that is not ONLINE.")
E ValueError: Invalid state. Cannot set properties on instance that is not ONLINE.

/usr/local/lib/python3.8/site-packages/lib389/_mapped_object.py:438: ValueError
Failed suites/syncrepl_plugin/basic_test.py::test_sync_repl_cookie_add_del 0.00
topology = <lib389.topologies.TopologyMain object at 0x7fd16763ed00>
request = <FixtureRequest for <Function test_sync_repl_cookie_add_del>>

def test_sync_repl_cookie_add_del(topology, request):
"""Test sync_repl cookie are progressing is an increasing order
when there add and del

:id: 83e11038-6ed0-4a5b-ac77-e44887ab11e3
:setup: Standalone Instance
:steps:
1.: enable retroCL
2.: configure retroCL to log nsuniqueid as targetUniqueId
3.: enable content_sync plugin
4.: enable automember
5.: create (2) groups. Few groups can help to reproduce the concurrent updates problem.
6.: configure automember to provision those groups with 'member'
7.: enable and configure memberof plugin
8.: enable plugin log level
9.: restart the server
10.: create a thread dedicated to run a sync repl client
11.: Create (3) users that will generate nested updates (automember/memberof)
12.: Delete (3) users
13.: stop sync repl client and collect the list of cookie.change_no
14.: check that cookies.change_no are in increasing order
:expectedresults:
1.: succeeds
2.: succeeds
3.: succeeds
4.: succeeds
5.: succeeds
6.: succeeds
7.: succeeds
8.: succeeds
9.: succeeds
10.: succeeds
11.: succeeds
12.: succeeds
13.: succeeds
14.: succeeds
"""
inst = topology[0]

# Enable/configure retroCL
plugin = RetroChangelogPlugin(inst)
> plugin.disable()

suites/syncrepl_plugin/basic_test.py:407:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/usr/local/lib/python3.8/site-packages/lib389/plugins.py:63: in disable
self.set('nsslapd-pluginEnabled', 'off')
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.plugins.RetroChangelogPlugin object at 0x7fd16763c1c0>
key = 'nsslapd-pluginEnabled', value = 'off', action = 2

def set(self, key, value, action=ldap.MOD_REPLACE):
"""Perform a specified action on a key with value

:param key: an attribute name
:type key: str
:param value: an attribute value
:type value: str
:param action: - ldap.MOD_REPLACE - by default
- ldap.MOD_ADD
- ldap.MOD_DELETE
:type action: int

:returns: result of modify_s operation
:raises: ValueError - if instance is not online
"""

if action == ldap.MOD_ADD:
action_txt = "ADD"
elif action == ldap.MOD_REPLACE:
action_txt = "REPLACE"
elif action == ldap.MOD_DELETE:
action_txt = "DELETE"
else:
# This should never happen (bug!)
action_txt = "UNKNOWN"

if value is None or len(value) < 512:
self._log.debug("%s set %s: (%r, %r)" % (self._dn, action_txt, key, display_log_value(key, value)))
else:
self._log.debug("%s set %s: (%r, value too large)" % (self._dn, action_txt, key))
if self._instance.state != DIRSRV_STATE_ONLINE:
> raise ValueError("Invalid state. Cannot set properties on instance that is not ONLINE.")
E ValueError: Invalid state. Cannot set properties on instance that is not ONLINE.

/usr/local/lib/python3.8/site-packages/lib389/_mapped_object.py:438: ValueError
Failed suites/syncrepl_plugin/basic_test.py::test_sync_repl_cookie_with_failure 0.00
topology = <lib389.topologies.TopologyMain object at 0x7fd16763ed00>
request = <FixtureRequest for <Function test_sync_repl_cookie_with_failure>>

def test_sync_repl_cookie_with_failure(topology, request):
"""Test sync_repl cookie are progressing is the right order
when there is a failure in nested updates

:id: e0103448-170e-4080-8f22-c34606447ce2
:setup: Standalone Instance
:steps:
1.: enable retroCL
2.: configure retroCL to log nsuniqueid as targetUniqueId
3.: enable content_sync plugin
4.: enable automember
5.: create (4) groups.
make group2 groupOfUniqueNames so the automember
will fail to add 'member' (uniqueMember expected)
6.: configure automember to provision those groups with 'member'
7.: enable and configure memberof plugin
8.: enable plugin log level
9.: restart the server
10.: create a thread dedicated to run a sync repl client
11.: Create a group that will be the only update received by sync repl client
12.: Create (9) users that will generate nested updates (automember/memberof)
13.: stop sync repl client and collect the list of cookie.change_no
14.: check that the list of cookie.change_no contains only the group 'step 11'
:expectedresults:
1.: succeeds
2.: succeeds
3.: succeeds
4.: succeeds
5.: succeeds
6.: succeeds
7.: succeeds
8.: succeeds
9.: succeeds
10.: succeeds
11.: succeeds
12.: Fails (expected)
13.: succeeds
14.: succeeds
"""
inst = topology[0]

# Enable/configure retroCL
plugin = RetroChangelogPlugin(inst)
> plugin.disable()

suites/syncrepl_plugin/basic_test.py:539:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/usr/local/lib/python3.8/site-packages/lib389/plugins.py:63: in disable
self.set('nsslapd-pluginEnabled', 'off')
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.plugins.RetroChangelogPlugin object at 0x7fd174d77ee0>
key = 'nsslapd-pluginEnabled', value = 'off', action = 2

def set(self, key, value, action=ldap.MOD_REPLACE):
"""Perform a specified action on a key with value

:param key: an attribute name
:type key: str
:param value: an attribute value
:type value: str
:param action: - ldap.MOD_REPLACE - by default
- ldap.MOD_ADD
- ldap.MOD_DELETE
:type action: int

:returns: result of modify_s operation
:raises: ValueError - if instance is not online
"""

if action == ldap.MOD_ADD:
action_txt = "ADD"
elif action == ldap.MOD_REPLACE:
action_txt = "REPLACE"
elif action == ldap.MOD_DELETE:
action_txt = "DELETE"
else:
# This should never happen (bug!)
action_txt = "UNKNOWN"

if value is None or len(value) < 512:
self._log.debug("%s set %s: (%r, %r)" % (self._dn, action_txt, key, display_log_value(key, value)))
else:
self._log.debug("%s set %s: (%r, value too large)" % (self._dn, action_txt, key))
if self._instance.state != DIRSRV_STATE_ONLINE:
> raise ValueError("Invalid state. Cannot set properties on instance that is not ONLINE.")
E ValueError: Invalid state. Cannot set properties on instance that is not ONLINE.

/usr/local/lib/python3.8/site-packages/lib389/_mapped_object.py:438: ValueError
Failed suites/vlv/regression_test.py::test_bulk_import_when_the_backend_with_vlv_was_recreated 0.34
self = <lib389.mappingTree.MappingTreeLegacy object at 0x7fd1740daa90>
suffix = 'dc=example,dc=com', bename = 'userRoot', parent = None

def create(self, suffix=None, bename=None, parent=None):
'''
Create a mapping tree entry (under "cn=mapping tree,cn=config"),
for the 'suffix' and that is stored in 'bename' backend.
'bename' backend must exist before creating the mapping tree entry.

If a 'parent' is provided that means that we are creating a
sub-suffix mapping tree.

@param suffix - suffix mapped by this mapping tree entry. It will
be the common name ('cn') of the entry
@param benamebase - backend common name (e.g. 'userRoot')
@param parent - if provided is a parent suffix of 'suffix'

@return DN of the mapping tree entry

@raise ldap.NO_SUCH_OBJECT - if the backend entry or parent mapping
tree does not exist
ValueError - if missing a parameter,

'''
# Check suffix is provided
if not suffix:
raise ValueError("suffix is mandatory")
else:
nsuffix = normalizeDN(suffix)

# Check backend name is provided
if not bename:
raise ValueError("backend name is mandatory")

# Check that if the parent suffix is provided then
# it exists a mapping tree for it
if parent:
nparent = normalizeDN(parent)
filt = suffixfilt(parent)
try:
entry = self.conn.getEntry(DN_MAPPING_TREE, ldap.SCOPE_SUBTREE,
filt)
pass
except NoSuchEntryError:
raise ValueError("parent suffix has no mapping tree")
else:
nparent = ""

# Check if suffix exists, return
filt = suffixfilt(suffix)
try:
entry = self.conn.getEntry(DN_MAPPING_TREE, ldap.SCOPE_SUBTREE,
filt)
return entry
except ldap.NO_SUCH_OBJECT:
entry = None

#
# Now start the real work
#

# fix me when we can actually used escaped DNs
dn = ','.join(('cn="%s"' % nsuffix, DN_MAPPING_TREE))
entry = Entry(dn)
entry.update({
'objectclass': ['top', 'extensibleObject', MT_OBJECTCLASS_VALUE],
'nsslapd-state': 'backend',
# the value in the dn has to be DN escaped
# internal code will add the quoted value - unquoted value is
# useful for searching.
MT_PROPNAME_TO_ATTRNAME[MT_SUFFIX]: nsuffix,
MT_PROPNAME_TO_ATTRNAME[MT_BACKEND]: bename
})

# possibly add the parent
if parent:
entry.setValues(MT_PROPNAME_TO_ATTRNAME[MT_PARENT_SUFFIX], nparent)

try:
self.log.debug("Creating entry: %s", entry.dn)
self.log.info("Entry %r", entry)
> self.conn.add_s(entry)

/usr/local/lib/python3.8/site-packages/lib389/mappingTree.py:155:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

args = (dn: cn="dc=example,dc=com",cn=mapping tree,cn=config
cn: dc=example,dc=com
nsslapd-backend: userRoot
nsslapd-state: backend
objectclass: top
objectclass: extensibleObject
objectclass: nsMappingTree

,)
kwargs = {}
c_stack = [FrameInfo(frame=<frame at 0x7fd1676e2640, file '/usr/local/lib/python3.8/site-packages/lib389/__init__.py', line 176,...mbda>', code_context=[' self._inner_hookexec = lambda hook, methods, kwargs: hook.multicall(\n'], index=0), ...]
frame = FrameInfo(frame=<frame at 0x559a4f491a90, file '/usr/local/lib/python3.8/site-packages/lib389/mappingTree.py', line 15.../lib389/mappingTree.py', lineno=155, function='create', code_context=[' self.conn.add_s(entry)\n'], index=0)
ent = dn: cn="dc=example,dc=com",cn=mapping tree,cn=config
cn: dc=example,dc=com
nsslapd-backend: userRoot
nsslapd-state: backend
objectclass: top
objectclass: extensibleObject
objectclass: nsMappingTree



def inner(*args, **kwargs):
if name in [
'add_s',
'bind_s',
'delete_s',
'modify_s',
'modrdn_s',
'rename_s',
'sasl_interactive_bind_s',
'search_s',
'search_ext_s',
'simple_bind_s',
'unbind_s',
'getEntry',
] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'):
c_stack = inspect.stack()
frame = c_stack[1]

warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. "
"Found in: %s:%s" % (name, frame.filename, frame.lineno)))
# Later, we will add a sleep here to make it even more painful.
# Finally, it will raise an exception.
elif 'escapehatch' in kwargs:
kwargs.pop('escapehatch')

if name == 'result':
objtype, data = f(*args, **kwargs)
# data is either a 2-tuple or a list of 2-tuples
# print data
if data:
if isinstance(data, tuple):
return objtype, Entry(data)
elif isinstance(data, list):
# AD sends back these search references
# if objtype == ldap.RES_SEARCH_RESULT and \
# isinstance(data[-1],tuple) and \
# not data[-1][0]:
# print "Received search reference: "
# pprint.pprint(data[-1][1])
# data.pop() # remove the last non-entry element

return objtype, [Entry(x) for x in data]
else:
raise TypeError("unknown data type %s returned by result" %
type(data))
else:
return objtype, data
elif name.startswith('add'):
# the first arg is self
# the second and third arg are the dn and the data to send
# We need to convert the Entry into the format used by
# python-ldap
ent = args[0]
if isinstance(ent, Entry):
> return f(ent.dn, ent.toTupleList(), *args[2:])

/usr/local/lib/python3.8/site-packages/lib389/__init__.py:176:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd167597220>
dn = 'cn="dc=example,dc=com",cn=mapping tree,cn=config'
modlist = [('objectclass', [b'top', b'extensibleObject', b'nsMappingTree']), ('nsslapd-state', [b'backend']), ('cn', [b'dc=example,dc=com']), ('nsslapd-backend', [b'userRoot'])]

def add_s(self,dn,modlist):
> return self.add_ext_s(dn,modlist,None,None)

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:439:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

args = ('cn="dc=example,dc=com",cn=mapping tree,cn=config', [('objectclass', [b'top', b'extensibleObject', b'nsMappingTree']), ('nsslapd-state', [b'backend']), ('cn', [b'dc=example,dc=com']), ('nsslapd-backend', [b'userRoot'])], None, None)
kwargs = {}, ent = 'cn="dc=example,dc=com",cn=mapping tree,cn=config'

def inner(*args, **kwargs):
if name in [
'add_s',
'bind_s',
'delete_s',
'modify_s',
'modrdn_s',
'rename_s',
'sasl_interactive_bind_s',
'search_s',
'search_ext_s',
'simple_bind_s',
'unbind_s',
'getEntry',
] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'):
c_stack = inspect.stack()
frame = c_stack[1]

warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. "
"Found in: %s:%s" % (name, frame.filename, frame.lineno)))
# Later, we will add a sleep here to make it even more painful.
# Finally, it will raise an exception.
elif 'escapehatch' in kwargs:
kwargs.pop('escapehatch')

if name == 'result':
objtype, data = f(*args, **kwargs)
# data is either a 2-tuple or a list of 2-tuples
# print data
if data:
if isinstance(data, tuple):
return objtype, Entry(data)
elif isinstance(data, list):
# AD sends back these search references
# if objtype == ldap.RES_SEARCH_RESULT and \
# isinstance(data[-1],tuple) and \
# not data[-1][0]:
# print "Received search reference: "
# pprint.pprint(data[-1][1])
# data.pop() # remove the last non-entry element

return objtype, [Entry(x) for x in data]
else:
raise TypeError("unknown data type %s returned by result" %
type(data))
else:
return objtype, data
elif name.startswith('add'):
# the first arg is self
# the second and third arg are the dn and the data to send
# We need to convert the Entry into the format used by
# python-ldap
ent = args[0]
if isinstance(ent, Entry):
return f(ent.dn, ent.toTupleList(), *args[2:])
else:
> return f(*args, **kwargs)

/usr/local/lib/python3.8/site-packages/lib389/__init__.py:178:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd167597220>
dn = 'cn="dc=example,dc=com",cn=mapping tree,cn=config'
modlist = [('objectclass', [b'top', b'extensibleObject', b'nsMappingTree']), ('nsslapd-state', [b'backend']), ('cn', [b'dc=example,dc=com']), ('nsslapd-backend', [b'userRoot'])]
serverctrls = None, clientctrls = None

def add_ext_s(self,dn,modlist,serverctrls=None,clientctrls=None):
msgid = self.add_ext(dn,modlist,serverctrls,clientctrls)
> resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout)

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:425:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

args = (76,), kwargs = {'all': 1, 'timeout': -1}

def inner(*args, **kwargs):
if name in [
'add_s',
'bind_s',
'delete_s',
'modify_s',
'modrdn_s',
'rename_s',
'sasl_interactive_bind_s',
'search_s',
'search_ext_s',
'simple_bind_s',
'unbind_s',
'getEntry',
] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'):
c_stack = inspect.stack()
frame = c_stack[1]

warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. "
"Found in: %s:%s" % (name, frame.filename, frame.lineno)))
# Later, we will add a sleep here to make it even more painful.
# Finally, it will raise an exception.
elif 'escapehatch' in kwargs:
kwargs.pop('escapehatch')

if name == 'result':
objtype, data = f(*args, **kwargs)
# data is either a 2-tuple or a list of 2-tuples
# print data
if data:
if isinstance(data, tuple):
return objtype, Entry(data)
elif isinstance(data, list):
# AD sends back these search references
# if objtype == ldap.RES_SEARCH_RESULT and \
# isinstance(data[-1],tuple) and \
# not data[-1][0]:
# print "Received search reference: "
# pprint.pprint(data[-1][1])
# data.pop() # remove the last non-entry element

return objtype, [Entry(x) for x in data]
else:
raise TypeError("unknown data type %s returned by result" %
type(data))
else:
return objtype, data
elif name.startswith('add'):
# the first arg is self
# the second and third arg are the dn and the data to send
# We need to convert the Entry into the format used by
# python-ldap
ent = args[0]
if isinstance(ent, Entry):
return f(ent.dn, ent.toTupleList(), *args[2:])
else:
return f(*args, **kwargs)
else:
> return f(*args, **kwargs)

/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd167597220>, msgid = 76, all = 1
timeout = -1, resp_ctrl_classes = None

def result3(self,msgid=ldap.RES_ANY,all=1,timeout=None,resp_ctrl_classes=None):
> resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4(
msgid,all,timeout,
add_ctrls=0,add_intermediates=0,add_extop=0,
resp_ctrl_classes=resp_ctrl_classes
)

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:764:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

args = (76, 1, -1)
kwargs = {'add_ctrls': 0, 'add_extop': 0, 'add_intermediates': 0, 'resp_ctrl_classes': None}

def inner(*args, **kwargs):
if name in [
'add_s',
'bind_s',
'delete_s',
'modify_s',
'modrdn_s',
'rename_s',
'sasl_interactive_bind_s',
'search_s',
'search_ext_s',
'simple_bind_s',
'unbind_s',
'getEntry',
] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'):
c_stack = inspect.stack()
frame = c_stack[1]

warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. "
"Found in: %s:%s" % (name, frame.filename, frame.lineno)))
# Later, we will add a sleep here to make it even more painful.
# Finally, it will raise an exception.
elif 'escapehatch' in kwargs:
kwargs.pop('escapehatch')

if name == 'result':
objtype, data = f(*args, **kwargs)
# data is either a 2-tuple or a list of 2-tuples
# print data
if data:
if isinstance(data, tuple):
return objtype, Entry(data)
elif isinstance(data, list):
# AD sends back these search references
# if objtype == ldap.RES_SEARCH_RESULT and \
# isinstance(data[-1],tuple) and \
# not data[-1][0]:
# print "Received search reference: "
# pprint.pprint(data[-1][1])
# data.pop() # remove the last non-entry element

return objtype, [Entry(x) for x in data]
else:
raise TypeError("unknown data type %s returned by result" %
type(data))
else:
return objtype, data
elif name.startswith('add'):
# the first arg is self
# the second and third arg are the dn and the data to send
# We need to convert the Entry into the format used by
# python-ldap
ent = args[0]
if isinstance(ent, Entry):
return f(ent.dn, ent.toTupleList(), *args[2:])
else:
return f(*args, **kwargs)
else:
> return f(*args, **kwargs)

/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd167597220>, msgid = 76, all = 1
timeout = -1, add_ctrls = 0, add_intermediates = 0, add_extop = 0
resp_ctrl_classes = None

def result4(self,msgid=ldap.RES_ANY,all=1,timeout=None,add_ctrls=0,add_intermediates=0,add_extop=0,resp_ctrl_classes=None):
if timeout is None:
timeout = self.timeout
> ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop)

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:774:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

args = (<built-in method result4 of LDAP object at 0x7fd1740da480>, 76, 1, -1, 0, 0, ...)
kwargs = {}

def inner(*args, **kwargs):
if name in [
'add_s',
'bind_s',
'delete_s',
'modify_s',
'modrdn_s',
'rename_s',
'sasl_interactive_bind_s',
'search_s',
'search_ext_s',
'simple_bind_s',
'unbind_s',
'getEntry',
] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'):
c_stack = inspect.stack()
frame = c_stack[1]

warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. "
"Found in: %s:%s" % (name, frame.filename, frame.lineno)))
# Later, we will add a sleep here to make it even more painful.
# Finally, it will raise an exception.
elif 'escapehatch' in kwargs:
kwargs.pop('escapehatch')

if name == 'result':
objtype, data = f(*args, **kwargs)
# data is either a 2-tuple or a list of 2-tuples
# print data
if data:
if isinstance(data, tuple):
return objtype, Entry(data)
elif isinstance(data, list):
# AD sends back these search references
# if objtype == ldap.RES_SEARCH_RESULT and \
# isinstance(data[-1],tuple) and \
# not data[-1][0]:
# print "Received search reference: "
# pprint.pprint(data[-1][1])
# data.pop() # remove the last non-entry element

return objtype, [Entry(x) for x in data]
else:
raise TypeError("unknown data type %s returned by result" %
type(data))
else:
return objtype, data
elif name.startswith('add'):
# the first arg is self
# the second and third arg are the dn and the data to send
# We need to convert the Entry into the format used by
# python-ldap
ent = args[0]
if isinstance(ent, Entry):
return f(ent.dn, ent.toTupleList(), *args[2:])
else:
return f(*args, **kwargs)
else:
> return f(*args, **kwargs)

/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd167597220>
func = <built-in method result4 of LDAP object at 0x7fd1740da480>
args = (76, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None
exc_type = None, exc_value = None, exc_traceback = None

def _ldap_call(self,func,*args,**kwargs):
"""
Wrapper method mainly for serializing calls into OpenLDAP libs
and trace logs
"""
self._ldap_object_lock.acquire()
if __debug__:
if self._trace_level>=1:
self._trace_file.write('*** %s %s - %s\n%s\n' % (
repr(self),
self._uri,
'.'.join((self.__class__.__name__,func.__name__)),
pprint.pformat((args,kwargs))
))
if self._trace_level>=9:
traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file)
diagnostic_message_success = None
try:
try:
result = func(*args,**kwargs)
if __debug__ and self._trace_level>=2:
if func.__name__!="unbind_ext":
diagnostic_message_success = self._l.get_option(ldap.OPT_DIAGNOSTIC_MESSAGE)
finally:
self._ldap_object_lock.release()
except LDAPError as e:
exc_type,exc_value,exc_traceback = sys.exc_info()
try:
if 'info' not in e.args[0] and 'errno' in e.args[0]:
e.args[0]['info'] = strerror(e.args[0]['errno'])
except IndexError:
pass
if __debug__ and self._trace_level>=2:
self._trace_file.write('=> LDAPError - %s: %s\n' % (e.__class__.__name__,str(e)))
try:
> reraise(exc_type, exc_value, exc_traceback)

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:340:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

exc_type = <class 'ldap.UNWILLING_TO_PERFORM'>
exc_value = UNWILLING_TO_PERFORM({'msgtype': 105, 'msgid': 76, 'result': 53, 'desc': 'Server is unwilling to perform', 'ctrls': []})
exc_traceback = <traceback object at 0x7fd1761e9fc0>

def reraise(exc_type, exc_value, exc_traceback):
"""Re-raise an exception given information from sys.exc_info()

Note that unlike six.reraise, this does not support replacing the
traceback. All arguments must come from a single sys.exc_info() call.
"""
# In Python 3, all exception info is contained in one object.
> raise exc_value

/usr/local/lib64/python3.8/site-packages/ldap/compat.py:46:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd167597220>
func = <built-in method result4 of LDAP object at 0x7fd1740da480>
args = (76, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None
exc_type = None, exc_value = None, exc_traceback = None

def _ldap_call(self,func,*args,**kwargs):
"""
Wrapper method mainly for serializing calls into OpenLDAP libs
and trace logs
"""
self._ldap_object_lock.acquire()
if __debug__:
if self._trace_level>=1:
self._trace_file.write('*** %s %s - %s\n%s\n' % (
repr(self),
self._uri,
'.'.join((self.__class__.__name__,func.__name__)),
pprint.pformat((args,kwargs))
))
if self._trace_level>=9:
traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file)
diagnostic_message_success = None
try:
try:
> result = func(*args,**kwargs)
E ldap.UNWILLING_TO_PERFORM: {'msgtype': 105, 'msgid': 76, 'result': 53, 'desc': 'Server is unwilling to perform', 'ctrls': []}

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:324: UNWILLING_TO_PERFORM

During handling of the above exception, another exception occurred:

topology_m2 = <lib389.topologies.TopologyMain object at 0x7fd1675db400>

@pytest.mark.DS47966
def test_bulk_import_when_the_backend_with_vlv_was_recreated(topology_m2):
"""
Testing bulk import when the backend with VLV was recreated.
If the test passes without the server crash, 47966 is verified.

:id: 512963fa-fe02-11e8-b1d3-8c16451d917b
:setup: Replication with two masters.
:steps:
1. Generate vlvSearch entry
2. Generate vlvIndex entry
3. Delete the backend instance on Master 2
4. Delete the agreement, replica, and mapping tree, too.
5. Recreate the backend and the VLV index on Master 2.
6. Recreating vlvSrchDn and vlvIndexDn on Master 2.
:expectedresults:
1. Should Success.
2. Should Success.
3. Should Success.
4. Should Success.
5. Should Success.
6. Should Success.
"""
M1 = topology_m2.ms["master1"]
M2 = topology_m2.ms["master2"]
# generate vlvSearch entry
properties_for_search = {
"objectclass": ["top", "vlvSearch"],
"cn": "vlvSrch",
"vlvbase": DEFAULT_SUFFIX,
"vlvfilter": "(|(objectclass=*)(objectclass=ldapsubentry))",
"vlvscope": "2",
}
vlv_searches = VLVSearch(M2)
userroot_vlvsearch = vlv_searches.create(
basedn="cn=userRoot,cn=ldbm database,cn=plugins,cn=config",
properties=properties_for_search,
)
assert "cn=vlvSrch,cn=userRoot,cn=ldbm database,cn=plugins,cn=config" in M2.getEntry(
"cn=vlvSrch,cn=userRoot,cn=ldbm database,cn=plugins,cn=config").dn
# generate vlvIndex entry
properties_for_index = {
"objectclass": ["top", "vlvIndex"],
"cn": "vlvIdx",
"vlvsort": "cn ou sn",
}
vlv_index = VLVIndex(M2)
userroot_index = vlv_index.create(
basedn="cn=vlvSrch,cn=userRoot,cn=ldbm database,cn=plugins,cn=config",
properties=properties_for_index,
)
assert "cn=vlvIdx,cn=vlvSrch,cn=userRoot,cn=ldbm database,cn=plugins,cn=config" in M2.getEntry(
"cn=vlvIdx,cn=vlvSrch,cn=userRoot,cn=ldbm database,cn=plugins,cn=config").dn
# Delete the backend instance on Master 2."
userroot_index.delete()
userroot_vlvsearch.delete_all()
# delete the agreement, replica, and mapping tree, too.
repl = ReplicationManager(DEFAULT_SUFFIX)
repl.remove_master(M2)
MappingTrees(M2).list()[0].delete()
Backends(M2).list()[0].delete()
# Recreate the backend and the VLV index on Master 2.
> M2.mappingtree.create(DEFAULT_SUFFIX, "userRoot")

suites/vlv/regression_test.py:87:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.mappingTree.MappingTreeLegacy object at 0x7fd1740daa90>
suffix = 'dc=example,dc=com', bename = 'userRoot', parent = None

def create(self, suffix=None, bename=None, parent=None):
'''
Create a mapping tree entry (under "cn=mapping tree,cn=config"),
for the 'suffix' and that is stored in 'bename' backend.
'bename' backend must exist before creating the mapping tree entry.

If a 'parent' is provided that means that we are creating a
sub-suffix mapping tree.

@param suffix - suffix mapped by this mapping tree entry. It will
be the common name ('cn') of the entry
@param benamebase - backend common name (e.g. 'userRoot')
@param parent - if provided is a parent suffix of 'suffix'

@return DN of the mapping tree entry

@raise ldap.NO_SUCH_OBJECT - if the backend entry or parent mapping
tree does not exist
ValueError - if missing a parameter,

'''
# Check suffix is provided
if not suffix:
raise ValueError("suffix is mandatory")
else:
nsuffix = normalizeDN(suffix)

# Check backend name is provided
if not bename:
raise ValueError("backend name is mandatory")

# Check that if the parent suffix is provided then
# it exists a mapping tree for it
if parent:
nparent = normalizeDN(parent)
filt = suffixfilt(parent)
try:
entry = self.conn.getEntry(DN_MAPPING_TREE, ldap.SCOPE_SUBTREE,
filt)
pass
except NoSuchEntryError:
raise ValueError("parent suffix has no mapping tree")
else:
nparent = ""

# Check if suffix exists, return
filt = suffixfilt(suffix)
try:
entry = self.conn.getEntry(DN_MAPPING_TREE, ldap.SCOPE_SUBTREE,
filt)
return entry
except ldap.NO_SUCH_OBJECT:
entry = None

#
# Now start the real work
#

# fix me when we can actually used escaped DNs
dn = ','.join(('cn="%s"' % nsuffix, DN_MAPPING_TREE))
entry = Entry(dn)
entry.update({
'objectclass': ['top', 'extensibleObject', MT_OBJECTCLASS_VALUE],
'nsslapd-state': 'backend',
# the value in the dn has to be DN escaped
# internal code will add the quoted value - unquoted value is
# useful for searching.
MT_PROPNAME_TO_ATTRNAME[MT_SUFFIX]: nsuffix,
MT_PROPNAME_TO_ATTRNAME[MT_BACKEND]: bename
})

# possibly add the parent
if parent:
entry.setValues(MT_PROPNAME_TO_ATTRNAME[MT_PARENT_SUFFIX], nparent)

try:
self.log.debug("Creating entry: %s", entry.dn)
self.log.info("Entry %r", entry)
self.conn.add_s(entry)
except ldap.LDAPError as e:
> raise ldap.LDAPError("Error adding suffix entry " + dn, e)
E ldap.LDAPError: ('Error adding suffix entry cn="dc=example,dc=com",cn=mapping tree,cn=config', UNWILLING_TO_PERFORM({'msgtype': 105, 'msgid': 76, 'result': 53, 'desc': 'Server is unwilling to perform', 'ctrls': []}))

/usr/local/lib/python3.8/site-packages/lib389/mappingTree.py:157: LDAPError
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 2315364d-ab1d-459c-b4f2-e2dec01dfac4 / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working 
(expect 3d280c83-3299-4fce-90de-2cffa32ea880 / got description=2315364d-ab1d-459c-b4f2-e2dec01dfac4) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists
------------------------------Captured stdout call------------------------------
deleting vlv search: cn=vlvSrch,cn=userRoot,cn=ldbm database,cn=plugins,cn=config deleting vlv search entry...
-------------------------------Captured log call--------------------------------
INFO  lib389:mappingTree.py:154 Entry dn: cn="dc=example,dc=com",cn=mapping tree,cn=config cn: dc=example,dc=com nsslapd-backend: userRoot nsslapd-state: backend objectclass: top objectclass: extensibleObject objectclass: nsMappingTree
Failed tickets/ticket47781_test.py::test_ticket47781 3.60
topology_st = <lib389.topologies.TopologyMain object at 0x7fd167251bb0>

def test_ticket47781(topology_st):
"""
Testing for a deadlock after doing an online import of an LDIF with
replication data. The replication agreement should be invalid.
"""

log.info('Testing Ticket 47781 - Testing for deadlock after importing LDIF with replication data')

master = topology_st.standalone
repl = ReplicationManager(DEFAULT_SUFFIX)
repl.create_first_master(master)

properties = {RA_NAME: r'meTo_$host:$port',
RA_BINDDN: defaultProperties[REPLICATION_BIND_DN],
RA_BINDPW: defaultProperties[REPLICATION_BIND_PW],
RA_METHOD: defaultProperties[REPLICATION_BIND_METHOD],
RA_TRANSPORT_PROT: defaultProperties[REPLICATION_TRANSPORT]}
# The agreement should point to a server that does NOT exist (invalid port)
repl_agreement = master.agreement.create(suffix=DEFAULT_SUFFIX,
host=master.host,
port=5555,
properties=properties)

#
# add two entries
#
log.info('Adding two entries...')

master.add_s(Entry(('cn=entry1,dc=example,dc=com', {
'objectclass': 'top person'.split(),
'sn': 'user',
'cn': 'entry1'})))

master.add_s(Entry(('cn=entry2,dc=example,dc=com', {
'objectclass': 'top person'.split(),
'sn': 'user',
'cn': 'entry2'})))

#
# export the replication ldif
#
log.info('Exporting replication ldif...')
args = {EXPORT_REPL_INFO: True}
exportTask = Tasks(master)
exportTask.exportLDIF(DEFAULT_SUFFIX, None, "/tmp/export.ldif", args)

#
# Restart the server
#
log.info('Restarting server...')
master.stop()
master.start()

#
# Import the ldif
#
log.info('Import replication LDIF file...')
importTask = Tasks(master)
args = {TASK_WAIT: True}
> importTask.importLDIF(DEFAULT_SUFFIX, None, "/tmp/export.ldif", args)

tickets/ticket47781_test.py:85:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.tasks.Tasks object at 0x7fd1673577f0>
suffix = 'dc=example,dc=com', benamebase = None, input_file = '/tmp/export.ldif'
args = {'wait': True}

def importLDIF(self, suffix=None, benamebase=None, input_file=None,
args=None):
'''
Import from a LDIF format a given 'suffix' (or 'benamebase' that stores
that suffix). It uses an internal task to acheive this request.

If 'suffix' and 'benamebase' are specified, it uses 'benamebase' first
else 'suffix'.
If both 'suffix' and 'benamebase' are missing it raise ValueError

'input_file' is the ldif input file

@param suffix - suffix of the backend
@param benamebase - 'commonname'/'cn' of the backend (e.g. 'userRoot')
@param ldif_input - file that will contain the entries in LDIF format
to import
@param args - is a dictionary that contains modifier of the import task
wait: True/[False] - If True, 'export' waits for the completion
of the task before to return

@return None

@raise ValueError

'''
if self.conn.state != DIRSRV_STATE_ONLINE:
raise ValueError("Invalid Server State %s! Must be online" % self.conn.state)

# Checking the parameters
if not benamebase and not suffix:
raise ValueError("Specify either bename or suffix")

if not input_file:
raise ValueError("input_file is mandatory")

if not os.path.exists(input_file):
> raise ValueError("Import file (%s) does not exist" % input_file)
E ValueError: Import file (/tmp/export.ldif) does not exist

/usr/local/lib/python3.8/site-packages/lib389/tasks.py:473: ValueError
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  lib389:tasks.py:567 Export task export_10282020_234239 for file /tmp/export.ldif completed successfully
Failed tickets/ticket47988_test.py::test_ticket47988_init 4.00
topology_m2 = <lib389.topologies.TopologyMain object at 0x7fd166fdbf70>

def test_ticket47988_init(topology_m2):
"""
It adds
- Objectclass with MAY 'member'
- an entry ('bind_entry') with which we bind to test the 'SELFDN' operation
It deletes the anonymous aci

"""

_header(topology_m2, 'test_ticket47988_init')

# enable acl error logging
mod = [(ldap.MOD_REPLACE, 'nsslapd-errorlog-level', ensure_bytes(str(8192)))] # REPL
topology_m2.ms["master1"].modify_s(DN_CONFIG, mod)
topology_m2.ms["master2"].modify_s(DN_CONFIG, mod)

mod = [(ldap.MOD_REPLACE, 'nsslapd-accesslog-level', ensure_bytes(str(260)))] # Internal op
topology_m2.ms["master1"].modify_s(DN_CONFIG, mod)
topology_m2.ms["master2"].modify_s(DN_CONFIG, mod)

# add dummy entries
for cpt in range(MAX_OTHERS):
name = "%s%d" % (OTHER_NAME, cpt)
topology_m2.ms["master1"].add_s(Entry(("cn=%s,%s" % (name, SUFFIX), {
'objectclass': "top person".split(),
'sn': name,
'cn': name})))

# check that entry 0 is replicated before
loop = 0
entryDN = "cn=%s0,%s" % (OTHER_NAME, SUFFIX)
while loop <= 10:
try:
ent = topology_m2.ms["master2"].getEntry(entryDN, ldap.SCOPE_BASE, "(objectclass=*)", ['telephonenumber'])
break
except ldap.NO_SUCH_OBJECT:
time.sleep(1)
loop += 1
assert (loop <= 10)

topology_m2.ms["master1"].stop(timeout=10)
topology_m2.ms["master2"].stop(timeout=10)

# install the specific schema M1: ipa3.3, M2: ipa4.1
schema_file = os.path.join(topology_m2.ms["master1"].getDir(__file__, DATA_DIR), "ticket47988/schema_ipa3.3.tar.gz")
_install_schema(topology_m2.ms["master1"], schema_file)
schema_file = os.path.join(topology_m2.ms["master1"].getDir(__file__, DATA_DIR), "ticket47988/schema_ipa4.1.tar.gz")
_install_schema(topology_m2.ms["master2"], schema_file)

> topology_m2.ms["master1"].start(timeout=10)

/export/tests/tickets/ticket47988_test.py:157:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:1079: in start
subprocess.check_output(["systemctl", "start", "dirsrv@%s" % self.serverid], stderr=subprocess.STDOUT)
/usr/lib64/python3.8/subprocess.py:411: in check_output
return run(*popenargs, stdout=PIPE, timeout=timeout, check=True,
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

input = None, capture_output = False, timeout = None, check = True
popenargs = (['systemctl', 'start', 'dirsrv@master1'],)
kwargs = {'stderr': -2, 'stdout': -1}
process = <subprocess.Popen object at 0x7fd16748cbe0>
stdout = b'Job for dirsrv@master1.service failed because the control process exited with error code.\nSee "systemctl status dirsrv@master1.service" and "journalctl -xe" for details.\n'
stderr = None, retcode = 1

def run(*popenargs,
input=None, capture_output=False, timeout=None, check=False, **kwargs):
"""Run command with arguments and return a CompletedProcess instance.

The returned instance will have attributes args, returncode, stdout and
stderr. By default, stdout and stderr are not captured, and those attributes
will be None. Pass stdout=PIPE and/or stderr=PIPE in order to capture them.

If check is True and the exit code was non-zero, it raises a
CalledProcessError. The CalledProcessError object will have the return code
in the returncode attribute, and output & stderr attributes if those streams
were captured.

If timeout is given, and the process takes too long, a TimeoutExpired
exception will be raised.

There is an optional argument "input", allowing you to
pass bytes or a string to the subprocess's stdin. If you use this argument
you may not also use the Popen constructor's "stdin" argument, as
it will be used internally.

By default, all communication is in bytes, and therefore any "input" should
be bytes, and the stdout and stderr will be bytes. If in text mode, any
"input" should be a string, and stdout and stderr will be strings decoded
according to locale encoding, or by "encoding" if set. Text mode is
triggered by setting any of text, encoding, errors or universal_newlines.

The other arguments are the same as for the Popen constructor.
"""
if input is not None:
if kwargs.get('stdin') is not None:
raise ValueError('stdin and input arguments may not both be used.')
kwargs['stdin'] = PIPE

if capture_output:
if kwargs.get('stdout') is not None or kwargs.get('stderr') is not None:
raise ValueError('stdout and stderr arguments may not be used '
'with capture_output.')
kwargs['stdout'] = PIPE
kwargs['stderr'] = PIPE

with Popen(*popenargs, **kwargs) as process:
try:
stdout, stderr = process.communicate(input, timeout=timeout)
except TimeoutExpired as exc:
process.kill()
if _mswindows:
# Windows accumulates the output in a single blocking
# read() call run on child threads, with the timeout
# being done in a join() on those threads. communicate()
# _after_ kill() is required to collect that and add it
# to the exception.
exc.stdout, exc.stderr = process.communicate()
else:
# POSIX _communicate already populated the output so
# far into the TimeoutExpired exception.
process.wait()
raise
except: # Including KeyboardInterrupt, communicate handled that.
process.kill()
# We don't call process.wait() as .__exit__ does that for us.
raise
retcode = process.poll()
if check and retcode:
> raise CalledProcessError(retcode, process.args,
output=stdout, stderr=stderr)
E subprocess.CalledProcessError: Command '['systemctl', 'start', 'dirsrv@master1']' returned non-zero exit status 1.

/usr/lib64/python3.8/subprocess.py:512: CalledProcessError
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0ccc9fb0-13d4-42bc-beb9-cf2492ebedeb / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working 
(expect 1f1d1e1d-775e-4da5-b9b9-71cefac954c2 / got description=0ccc9fb0-13d4-42bc-beb9-cf2492ebedeb) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47988_test.py:64 ############################################### INFO  lib389:ticket47988_test.py:65 ####### INFO  lib389:ticket47988_test.py:66 ####### test_ticket47988_init INFO  lib389:ticket47988_test.py:67 ####### INFO  lib389:ticket47988_test.py:68 ################################################### INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/02common.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/50ns-admin.ldif INFO  lib389:ticket47988_test.py:98 replace /etc/dirsrv/slapd-master1/schema/99user.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/60nss-ldap.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/60autofs.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/50ns-web.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/60samba.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/10dna-plugin.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/05rfc4523.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/60basev2.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/10automember-plugin.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/05rfc2927.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/10mep-plugin.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/60ipadns.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/10rfc2307.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/50ns-mail.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/05rfc4524.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/60trust.ldif INFO  lib389:ticket47988_test.py:102 add 
/etc/dirsrv/slapd-master1/schema/60ipaconfig.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/50ns-directory.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/60eduperson.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/60mozilla.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/65ipasudo.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/60rfc3712.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/60rfc2739.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/50ns-value.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/60acctpolicy.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/01core389.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/60sabayon.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/60pam-plugin.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/00core.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/25java-object.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/60sudo.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/70ipaotp.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/60pureftpd.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/61kerberos-ipav3.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/60kerberos.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/60basev3.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/06inetorgperson.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/30ns-common.ldif INFO  lib389:ticket47988_test.py:102 add 
/etc/dirsrv/slapd-master1/schema/28pilot.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/20subscriber.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/50ns-certificate.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master1/schema/60posix-winsync-plugin.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/02common.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/50ns-admin.ldif INFO  lib389:ticket47988_test.py:98 replace /etc/dirsrv/slapd-master2/schema/99user.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/60nss-ldap.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/60autofs.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/50ns-web.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/60samba.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/10dna-plugin.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/05rfc4523.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/60basev2.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/10automember-plugin.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/05rfc2927.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/10mep-plugin.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/60ipadns.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/10rfc2307.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/50ns-mail.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/05rfc4524.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/60trust.ldif INFO  lib389:ticket47988_test.py:102 add 
/etc/dirsrv/slapd-master2/schema/60ipaconfig.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/50ns-directory.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/60eduperson.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/60mozilla.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/65ipasudo.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/60rfc3712.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/60rfc2739.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/50ns-value.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/60acctpolicy.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/01core389.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/60sabayon.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/60pam-plugin.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/00core.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/25java-object.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/60sudo.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/70ipaotp.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/60pureftpd.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/61kerberos-ipav3.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/60kerberos.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/60basev3.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/06inetorgperson.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/30ns-common.ldif INFO  lib389:ticket47988_test.py:102 add 
/etc/dirsrv/slapd-master2/schema/28pilot.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/20subscriber.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/50ns-certificate.ldif INFO  lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-master2/schema/60posix-winsync-plugin.ldif
Failed tickets/ticket47988_test.py::test_ticket47988_1 0.00
topology_m2 = <lib389.topologies.TopologyMain object at 0x7fd166fdbf70>

def test_ticket47988_1(topology_m2):
'''
Check that replication is working and pause replication M2->M1
'''
_header(topology_m2, 'test_ticket47988_1')

topology_m2.ms["master1"].log.debug("\n\nCheck that replication is working and pause replication M2->M1\n")
> _do_update_entry(supplier=topology_m2.ms["master2"], consumer=topology_m2.ms["master1"], attempts=5)

/export/tests/tickets/ticket47988_test.py:234:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/export/tests/tickets/ticket47988_test.py:184: in _do_update_entry
supplier.modify_s(entryDN, mod)
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:640: in modify_s
return self.modify_ext_s(dn,modlist,None,None)
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:613: in modify_ext_s
resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout)
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:764: in result3
resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4(
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:774: in result4
ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop)
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:340: in _ldap_call
reraise(exc_type, exc_value, exc_traceback)
/usr/local/lib64/python3.8/site-packages/ldap/compat.py:46: in reraise
raise exc_value
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd166fd7a30>
func = <built-in method result4 of LDAP object at 0x7fd1672eafc0>
args = (26, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None
exc_type = None, exc_value = None, exc_traceback = None

def _ldap_call(self,func,*args,**kwargs):
"""
Wrapper method mainly for serializing calls into OpenLDAP libs
and trace logs
"""
self._ldap_object_lock.acquire()
if __debug__:
if self._trace_level>=1:
self._trace_file.write('*** %s %s - %s\n%s\n' % (
repr(self),
self._uri,
'.'.join((self.__class__.__name__,func.__name__)),
pprint.pformat((args,kwargs))
))
if self._trace_level>=9:
traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file)
diagnostic_message_success = None
try:
try:
> result = func(*args,**kwargs)
E ldap.SERVER_DOWN: {'result': -1, 'desc': "Can't contact LDAP server", 'ctrls': []}

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:324: SERVER_DOWN
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47988_test.py:64 ############################################### INFO  lib389:ticket47988_test.py:65 ####### INFO  lib389:ticket47988_test.py:66 ####### test_ticket47988_1 INFO  lib389:ticket47988_test.py:67 ####### INFO  lib389:ticket47988_test.py:68 ###################################################
Failed tickets/ticket47988_test.py::test_ticket47988_2 0.01
topology_m2 = <lib389.topologies.TopologyMain object at 0x7fd166fdbf70>

def test_ticket47988_2(topology_m2):
'''
Update M1 schema and trigger update M1->M2
So M1 should learn new/extended definitions that are in M2 schema
'''
_header(topology_m2, 'test_ticket47988_2')

topology_m2.ms["master1"].log.debug("\n\nUpdate M1 schema and an entry on M1\n")
> master1_schema_csn = topology_m2.ms["master1"].schema.get_schema_csn()

/export/tests/tickets/ticket47988_test.py:246:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/usr/local/lib/python3.8/site-packages/lib389/schema.py:604: in get_schema_csn
ents = self.conn.search_s(DN_SCHEMA, ldap.SCOPE_BASE,
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:870: in search_s
return self.search_ext_s(base,scope,filterstr,attrlist,attrsonly,None,None,timeout=self.timeout)
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:864: in search_ext_s
return self.result(msgid,all=1,timeout=timeout)[1]
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:148: in inner
objtype, data = f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:756: in result
resp_type, resp_data, resp_msgid = self.result2(msgid,all,timeout)
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:760: in result2
resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all,timeout)
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:764: in result3
resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4(
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:774: in result4
ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop)
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:340: in _ldap_call
reraise(exc_type, exc_value, exc_traceback)
/usr/local/lib64/python3.8/site-packages/ldap/compat.py:46: in reraise
raise exc_value
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd167251ee0>
func = <built-in method result4 of LDAP object at 0x7fd16756b690>
args = (62, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None
exc_type = None, exc_value = None, exc_traceback = None

def _ldap_call(self,func,*args,**kwargs):
"""
Wrapper method mainly for serializing calls into OpenLDAP libs
and trace logs
"""
self._ldap_object_lock.acquire()
if __debug__:
if self._trace_level>=1:
self._trace_file.write('*** %s %s - %s\n%s\n' % (
repr(self),
self._uri,
'.'.join((self.__class__.__name__,func.__name__)),
pprint.pformat((args,kwargs))
))
if self._trace_level>=9:
traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file)
diagnostic_message_success = None
try:
try:
> result = func(*args,**kwargs)
E ldap.SERVER_DOWN: {'result': -1, 'desc': "Can't contact LDAP server", 'ctrls': []}

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:324: SERVER_DOWN
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47988_test.py:64 ############################################### INFO  lib389:ticket47988_test.py:65 ####### INFO  lib389:ticket47988_test.py:66 ####### test_ticket47988_2 INFO  lib389:ticket47988_test.py:67 ####### INFO  lib389:ticket47988_test.py:68 ###################################################
Failed tickets/ticket47988_test.py::test_ticket47988_3 0.00
topology_m2 = <lib389.topologies.TopologyMain object at 0x7fd166fdbf70>

def test_ticket47988_3(topology_m2):
'''
Resume replication M2->M1 and check replication is still working
'''
_header(topology_m2, 'test_ticket47988_3')

> _resume_M2_to_M1(topology_m2)

/export/tests/tickets/ticket47988_test.py:283:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/export/tests/tickets/ticket47988_test.py:222: in _resume_M2_to_M1
ents = topology_m2.ms["master2"].agreement.list(suffix=SUFFIX)
/usr/local/lib/python3.8/site-packages/lib389/agreement.py:905: in list
replica_entries = self.conn.replica.list(suffix)
/usr/local/lib/python3.8/site-packages/lib389/replica.py:178: in list
ents = self.conn.search_s(base, ldap.SCOPE_SUBTREE, filtr)
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:870: in search_s
return self.search_ext_s(base,scope,filterstr,attrlist,attrsonly,None,None,timeout=self.timeout)
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:863: in search_ext_s
msgid = self.search_ext(base,scope,filterstr,attrlist,attrsonly,serverctrls,clientctrls,timeout,sizelimit)
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:853: in search_ext
return self._ldap_call(
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:340: in _ldap_call
reraise(exc_type, exc_value, exc_traceback)
/usr/local/lib64/python3.8/site-packages/ldap/compat.py:46: in reraise
raise exc_value
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd166fd7a30>
func = <built-in method search_ext of LDAP object at 0x7fd1672eafc0>
args = ('cn=mapping tree,cn=config', 2, '(&(objectclass=nsds5Replica)(nsDS5ReplicaRoot=dc=example,dc=com))', None, 0, None, ...)
kwargs = {}, diagnostic_message_success = None, exc_type = None
exc_value = None, exc_traceback = None

def _ldap_call(self,func,*args,**kwargs):
"""
Wrapper method mainly for serializing calls into OpenLDAP libs
and trace logs
"""
self._ldap_object_lock.acquire()
if __debug__:
if self._trace_level>=1:
self._trace_file.write('*** %s %s - %s\n%s\n' % (
repr(self),
self._uri,
'.'.join((self.__class__.__name__,func.__name__)),
pprint.pformat((args,kwargs))
))
if self._trace_level>=9:
traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file)
diagnostic_message_success = None
try:
try:
> result = func(*args,**kwargs)
E ldap.SERVER_DOWN: {'result': -1, 'desc': "Can't contact LDAP server", 'ctrls': []}

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:324: SERVER_DOWN
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47988_test.py:64 ############################################### INFO  lib389:ticket47988_test.py:65 ####### INFO  lib389:ticket47988_test.py:66 ####### test_ticket47988_3 INFO  lib389:ticket47988_test.py:67 ####### INFO  lib389:ticket47988_test.py:68 ################################################### INFO  lib389:ticket47988_test.py:221 ######################### resume RA M2->M1 ######################
Failed tickets/ticket47988_test.py::test_ticket47988_4 0.00
topology_m2 = <lib389.topologies.TopologyMain object at 0x7fd166fdbf70>

def test_ticket47988_4(topology_m2):
'''
Check schemaCSN is identical on both server
And save the nsschemaCSN to later check they do not change unexpectedly
'''
_header(topology_m2, 'test_ticket47988_4')

> master1_schema_csn = topology_m2.ms["master1"].schema.get_schema_csn()

/export/tests/tickets/ticket47988_test.py:295:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/usr/local/lib/python3.8/site-packages/lib389/schema.py:604: in get_schema_csn
ents = self.conn.search_s(DN_SCHEMA, ldap.SCOPE_BASE,
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:870: in search_s
return self.search_ext_s(base,scope,filterstr,attrlist,attrsonly,None,None,timeout=self.timeout)
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:863: in search_ext_s
msgid = self.search_ext(base,scope,filterstr,attrlist,attrsonly,serverctrls,clientctrls,timeout,sizelimit)
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:853: in search_ext
return self._ldap_call(
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:340: in _ldap_call
reraise(exc_type, exc_value, exc_traceback)
/usr/local/lib64/python3.8/site-packages/ldap/compat.py:46: in reraise
raise exc_value
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd167251ee0>
func = <built-in method search_ext of LDAP object at 0x7fd16756b690>
args = ('cn=schema', 0, 'objectclass=*', ['nsSchemaCSN'], 0, None, ...)
kwargs = {}, diagnostic_message_success = None, exc_type = None
exc_value = None, exc_traceback = None

def _ldap_call(self,func,*args,**kwargs):
"""
Wrapper method mainly for serializing calls into OpenLDAP libs
and trace logs
"""
self._ldap_object_lock.acquire()
if __debug__:
if self._trace_level>=1:
self._trace_file.write('*** %s %s - %s\n%s\n' % (
repr(self),
self._uri,
'.'.join((self.__class__.__name__,func.__name__)),
pprint.pformat((args,kwargs))
))
if self._trace_level>=9:
traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file)
diagnostic_message_success = None
try:
try:
> result = func(*args,**kwargs)
E ldap.SERVER_DOWN: {'result': -1, 'desc': "Can't contact LDAP server", 'ctrls': []}

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:324: SERVER_DOWN
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47988_test.py:64 ############################################### INFO  lib389:ticket47988_test.py:65 ####### INFO  lib389:ticket47988_test.py:66 ####### test_ticket47988_4 INFO  lib389:ticket47988_test.py:67 ####### INFO  lib389:ticket47988_test.py:68 ###################################################
Failed tickets/ticket47988_test.py::test_ticket47988_5 0.00
topology_m2 = <lib389.topologies.TopologyMain object at 0x7fd166fdbf70>

def test_ticket47988_5(topology_m2):
'''
Check schemaCSN do not change unexpectedly
'''
_header(topology_m2, 'test_ticket47988_5')

> _do_update_entry(supplier=topology_m2.ms["master1"], consumer=topology_m2.ms["master2"], attempts=5)

/export/tests/tickets/ticket47988_test.py:313:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/export/tests/tickets/ticket47988_test.py:184: in _do_update_entry
supplier.modify_s(entryDN, mod)
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:640: in modify_s
return self.modify_ext_s(dn,modlist,None,None)
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:612: in modify_ext_s
msgid = self.modify_ext(dn,modlist,serverctrls,clientctrls)
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:609: in modify_ext
return self._ldap_call(self._l.modify_ext,dn,modlist,RequestControlTuples(serverctrls),RequestControlTuples(clientctrls))
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:340: in _ldap_call
reraise(exc_type, exc_value, exc_traceback)
/usr/local/lib64/python3.8/site-packages/ldap/compat.py:46: in reraise
raise exc_value
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd167251ee0>
func = <built-in method modify_ext of LDAP object at 0x7fd16756b690>
args = ('cn=other_entry0,dc=example,dc=com', [(2, 'telephonenumber', b'198')], None, None)
kwargs = {}, diagnostic_message_success = None, exc_type = None
exc_value = None, exc_traceback = None

def _ldap_call(self,func,*args,**kwargs):
"""
Wrapper method mainly for serializing calls into OpenLDAP libs
and trace logs
"""
self._ldap_object_lock.acquire()
if __debug__:
if self._trace_level>=1:
self._trace_file.write('*** %s %s - %s\n%s\n' % (
repr(self),
self._uri,
'.'.join((self.__class__.__name__,func.__name__)),
pprint.pformat((args,kwargs))
))
if self._trace_level>=9:
traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file)
diagnostic_message_success = None
try:
try:
> result = func(*args,**kwargs)
E ldap.SERVER_DOWN: {'result': -1, 'desc': "Can't contact LDAP server", 'ctrls': []}

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:324: SERVER_DOWN
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47988_test.py:64 ############################################### INFO  lib389:ticket47988_test.py:65 ####### INFO  lib389:ticket47988_test.py:66 ####### test_ticket47988_5 INFO  lib389:ticket47988_test.py:67 ####### INFO  lib389:ticket47988_test.py:68 ###################################################
Failed tickets/ticket47988_test.py::test_ticket47988_6 0.00
topology_m2 = <lib389.topologies.TopologyMain object at 0x7fd166fdbf70>

def test_ticket47988_6(topology_m2):
'''
Update M1 schema and trigger update M2->M1
So M2 should learn new/extended definitions that are in M1 schema
'''

_header(topology_m2, 'test_ticket47988_6')

topology_m2.ms["master1"].log.debug("\n\nUpdate M1 schema and an entry on M1\n")
> master1_schema_csn = topology_m2.ms["master1"].schema.get_schema_csn()

/export/tests/tickets/ticket47988_test.py:336:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/usr/local/lib/python3.8/site-packages/lib389/schema.py:604: in get_schema_csn
ents = self.conn.search_s(DN_SCHEMA, ldap.SCOPE_BASE,
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:870: in search_s
return self.search_ext_s(base,scope,filterstr,attrlist,attrsonly,None,None,timeout=self.timeout)
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:863: in search_ext_s
msgid = self.search_ext(base,scope,filterstr,attrlist,attrsonly,serverctrls,clientctrls,timeout,sizelimit)
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:853: in search_ext
return self._ldap_call(
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:340: in _ldap_call
reraise(exc_type, exc_value, exc_traceback)
/usr/local/lib64/python3.8/site-packages/ldap/compat.py:46: in reraise
raise exc_value
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd167251ee0>
func = <built-in method search_ext of LDAP object at 0x7fd16756b690>
args = ('cn=schema', 0, 'objectclass=*', ['nsSchemaCSN'], 0, None, ...)
kwargs = {}, diagnostic_message_success = None, exc_type = None
exc_value = None, exc_traceback = None

def _ldap_call(self,func,*args,**kwargs):
"""
Wrapper method mainly for serializing calls into OpenLDAP libs
and trace logs
"""
self._ldap_object_lock.acquire()
if __debug__:
if self._trace_level>=1:
self._trace_file.write('*** %s %s - %s\n%s\n' % (
repr(self),
self._uri,
'.'.join((self.__class__.__name__,func.__name__)),
pprint.pformat((args,kwargs))
))
if self._trace_level>=9:
traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file)
diagnostic_message_success = None
try:
try:
> result = func(*args,**kwargs)
E ldap.SERVER_DOWN: {'result': -1, 'desc': "Can't contact LDAP server", 'ctrls': []}

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:324: SERVER_DOWN
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47988_test.py:64 ############################################### INFO  lib389:ticket47988_test.py:65 ####### INFO  lib389:ticket47988_test.py:66 ####### test_ticket47988_6 INFO  lib389:ticket47988_test.py:67 ####### INFO  lib389:ticket47988_test.py:68 ###################################################
Failed tickets/ticket48005_test.py::test_ticket48005_setup 4.57
topology_st = <lib389.topologies.TopologyMain object at 0x7fd1674b5f70>

def test_ticket48005_setup(topology_st):
'''
allow dump core
generate a test ldif file using dbgen.pl
import the ldif
'''
log.info("Ticket 48005 setup...")
if hasattr(topology_st.standalone, 'prefix'):
prefix = topology_st.standalone.prefix
else:
prefix = None
sysconfig_dirsrv = os.path.join(topology_st.standalone.get_initconfig_dir(), 'dirsrv')
cmdline = 'egrep "ulimit -c unlimited" %s' % sysconfig_dirsrv
p = os.popen(cmdline, "r")
ulimitc = p.readline()
if ulimitc == "":
log.info('No ulimit -c in %s' % sysconfig_dirsrv)
log.info('Adding it')
cmdline = 'echo "ulimit -c unlimited" >> %s' % sysconfig_dirsrv

sysconfig_dirsrv_systemd = sysconfig_dirsrv + ".systemd"
cmdline = 'egrep LimitCORE=infinity %s' % sysconfig_dirsrv_systemd
p = os.popen(cmdline, "r")
lcore = p.readline()
if lcore == "":
log.info('No LimitCORE in %s' % sysconfig_dirsrv_systemd)
log.info('Adding it')
cmdline = 'echo LimitCORE=infinity >> %s' % sysconfig_dirsrv_systemd

topology_st.standalone.restart(timeout=10)

ldif_file = topology_st.standalone.get_ldif_dir() + "/ticket48005.ldif"
os.system('ls %s' % ldif_file)
os.system('rm -f %s' % ldif_file)
if hasattr(topology_st.standalone, 'prefix'):
prefix = topology_st.standalone.prefix
else:
prefix = ""
dbgen_prog = prefix + '/bin/dbgen.pl'
log.info('dbgen_prog: %s' % dbgen_prog)
os.system('%s -s %s -o %s -u -n 10000' % (dbgen_prog, SUFFIX, ldif_file))
cmdline = 'egrep dn: %s | wc -l' % ldif_file
p = os.popen(cmdline, "r")
dnnumstr = p.readline()
num = int(dnnumstr)
log.info("We have %d entries.\n", num)

importTask = Tasks(topology_st.standalone)
args = {TASK_WAIT: True}
> importTask.importLDIF(SUFFIX, None, ldif_file, args)

/export/tests/tickets/ticket48005_test.py:74:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.tasks.Tasks object at 0x7fd166e2c370>
suffix = 'dc=example,dc=com', benamebase = None
input_file = '/var/lib/dirsrv/slapd-standalone1/ldif/ticket48005.ldif'
args = {'wait': True}

def importLDIF(self, suffix=None, benamebase=None, input_file=None,
args=None):
'''
Import from a LDIF format a given 'suffix' (or 'benamebase' that stores
that suffix). It uses an internal task to achieve this request.

If 'suffix' and 'benamebase' are specified, it uses 'benamebase' first
else 'suffix'.
If both 'suffix' and 'benamebase' are missing it raise ValueError

'input_file' is the ldif input file

@param suffix - suffix of the backend
@param benamebase - 'commonname'/'cn' of the backend (e.g. 'userRoot')
@param ldif_input - file that will contain the entries in LDIF format
to import
@param args - is a dictionary that contains modifier of the import task
wait: True/[False] - If True, 'export' waits for the completion
of the task before to return

@return None

@raise ValueError

'''
if self.conn.state != DIRSRV_STATE_ONLINE:
raise ValueError("Invalid Server State %s! Must be online" % self.conn.state)

# Checking the parameters
if not benamebase and not suffix:
raise ValueError("Specify either bename or suffix")

if not input_file:
raise ValueError("input_file is mandatory")

if not os.path.exists(input_file):
> raise ValueError("Import file (%s) does not exist" % input_file)
E ValueError: Import file (/var/lib/dirsrv/slapd-standalone1/ldif/ticket48005.ldif) does not exist

/usr/local/lib/python3.8/site-packages/lib389/tasks.py:473: ValueError
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
------------------------------Captured stderr call------------------------------
grep: /etc/sysconfig/dirsrv: No such file or directory grep: /etc/sysconfig/dirsrv.systemd: No such file or directory ls: cannot access '/var/lib/dirsrv/slapd-standalone1/ldif/ticket48005.ldif': No such file or directory sh: /bin/dbgen.pl: No such file or directory grep: /var/lib/dirsrv/slapd-standalone1/ldif/ticket48005.ldif: No such file or directory
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket48005_test:ticket48005_test.py:31 Ticket 48005 setup... INFO  tests.tickets.ticket48005_test:ticket48005_test.py:41 No ulimit -c in /etc/sysconfig/dirsrv INFO  tests.tickets.ticket48005_test:ticket48005_test.py:42 Adding it INFO  tests.tickets.ticket48005_test:ticket48005_test.py:50 No LimitCORE in /etc/sysconfig/dirsrv.systemd INFO  tests.tickets.ticket48005_test:ticket48005_test.py:51 Adding it INFO  tests.tickets.ticket48005_test:ticket48005_test.py:64 dbgen_prog: /bin/dbgen.pl INFO  tests.tickets.ticket48005_test:ticket48005_test.py:70 We have 0 entries.
Failed tickets/ticket48013_test.py::test_ticket48013 1.27
topology_st = <lib389.topologies.TopologyMain object at 0x7fd167535c70>

def test_ticket48013(topology_st):
'''
Content Synchronization: Test that invalid cookies are caught
'''

cookies = ('#', '##', 'a#a#a', 'a#a#1')

# Enable dynamic plugins
try:
topology_st.standalone.modify_s(DN_CONFIG, [(ldap.MOD_REPLACE, 'nsslapd-dynamic-plugins', b'on')])
except ldap.LDAPError as e:
log.error('Failed to enable dynamic plugin! {}'.format(e.args[0]['desc']))
assert False

# Enable retro changelog
topology_st.standalone.plugins.enable(name=PLUGIN_RETRO_CHANGELOG)

# Enable content sync plugin
> topology_st.standalone.plugins.enable(name=PLUGIN_REPL_SYNC)

/export/tests/tickets/ticket48013_test.py:61:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/usr/local/lib/python3.8/site-packages/lib389/plugins.py:2105: in enable
plugin.enable()
/usr/local/lib/python3.8/site-packages/lib389/plugins.py:58: in enable
self.set('nsslapd-pluginEnabled', 'on')
/usr/local/lib/python3.8/site-packages/lib389/_mapped_object.py:446: in set
return self._instance.modify_ext_s(self._dn, [(action, key, value)],
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:613: in modify_ext_s
resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout)
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:764: in result3
resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4(
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:774: in result4
ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop)
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:340: in _ldap_call
reraise(exc_type, exc_value, exc_traceback)
/usr/local/lib64/python3.8/site-packages/ldap/compat.py:46: in reraise
raise exc_value
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd167535490>
func = <built-in method result4 of LDAP object at 0x7fd166d49540>
args = (7, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None
exc_type = None, exc_value = None, exc_traceback = None

def _ldap_call(self,func,*args,**kwargs):
"""
Wrapper method mainly for serializing calls into OpenLDAP libs
and trace logs
"""
self._ldap_object_lock.acquire()
if __debug__:
if self._trace_level>=1:
self._trace_file.write('*** %s %s - %s\n%s\n' % (
repr(self),
self._uri,
'.'.join((self.__class__.__name__,func.__name__)),
pprint.pformat((args,kwargs))
))
if self._trace_level>=9:
traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file)
diagnostic_message_success = None
try:
try:
> result = func(*args,**kwargs)
E ldap.SERVER_DOWN: {'result': -1, 'desc': "Can't contact LDAP server", 'ctrls': []}

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:324: SERVER_DOWN
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Failed tickets/ticket48194_test.py::test_run_1 6.95
topology_st = <lib389.topologies.TopologyMain object at 0x7fd1675089a0>

def test_run_1(topology_st):
"""
Check nsSSL3Ciphers: +all
All ciphers are enabled except null.
Note: default allowWeakCipher (i.e., off) for +all
"""
_header(topology_st, 'Test Case 2 - Check the ciphers availability for "+all" with default allowWeakCiphers')

topology_st.standalone.simple_bind_s(DN_DM, PASSWORD)
topology_st.standalone.modify_s(CONFIG_DN, [(ldap.MOD_REPLACE, 'nsslapd-errorlog-level', b'64')])
# Make sure allowWeakCipher is not set.
topology_st.standalone.modify_s(ENCRYPTION_DN, [(ldap.MOD_DELETE, 'allowWeakCipher', None)])

log.info("\n######################### Restarting the server ######################\n")
topology_st.standalone.stop(timeout=10)
os.system('mv %s %s.48194_0' % (topology_st.standalone.errlog, topology_st.standalone.errlog))
os.system('touch %s' % (topology_st.standalone.errlog))
time.sleep(2)
topology_st.standalone.start(timeout=120)

> connectWithOpenssl(topology_st, 'DES-CBC3-SHA', False)

/export/tests/tickets/ticket48194_test.py:158:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

topology_st = <lib389.topologies.TopologyMain object at 0x7fd1675089a0>
cipher = 'DES-CBC3-SHA', expect = False

def connectWithOpenssl(topology_st, cipher, expect):
"""
Connect with the given cipher
Condition:
If expect is True, the handshake should be successful.
If expect is False, the handshake should be refused with
access log: "Cannot communicate securely with peer:
no common encryption algorithm(s)."
"""
log.info("Testing %s -- expect to handshake %s", cipher, "successfully" if expect else "failed")

myurl = 'localhost:%s' % LDAPSPORT
cmdline = ['/usr/bin/openssl', 's_client', '-connect', myurl, '-cipher', cipher]

strcmdline = " ".join(cmdline)
log.info("Running cmdline: %s", strcmdline)

try:
proc = subprocess.Popen(cmdline, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT)
except ValueError:
log.info("%s failed: %s", cmdline, ValueError)
proc.kill()

while True:
l = proc.stdout.readline()
if l == b"":
break
if b'Cipher is' in l:
log.info("Found: %s", l)
if expect:
if b'(NONE)' in l:
assert False
else:
proc.stdin.close()
assert True
else:
if b'(NONE)' in l:
assert True
else:
proc.stdin.close()
> assert False
E assert False

/export/tests/tickets/ticket48194_test.py:117: AssertionError
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket48194_test.py:40 ############################################### INFO  lib389:ticket48194_test.py:41 ####### Test Case 2 - Check the ciphers availability for "+all" with default allowWeakCiphers INFO  lib389:ticket48194_test.py:42 ############################################### INFO  lib389.utils:ticket48194_test.py:151 ######################### Restarting the server ###################### INFO  lib389.utils:ticket48194_test.py:86 Testing DES-CBC3-SHA -- expect to handshake failed INFO  lib389.utils:ticket48194_test.py:92 Running cmdline: /usr/bin/openssl s_client -connect localhost:63601 -cipher DES-CBC3-SHA INFO  lib389.utils:ticket48194_test.py:105 Found: b'New, TLSv1.3, Cipher is TLS_AES_128_GCM_SHA256\n'
Failed tickets/ticket48194_test.py::test_run_2 6.46
topology_st = <lib389.topologies.TopologyMain object at 0x7fd1675089a0>

def test_run_2(topology_st):
"""
Check nsSSL3Ciphers: +rsa_aes_128_sha,+rsa_aes_256_sha
rsa_aes_128_sha, tls_rsa_aes_128_sha, rsa_aes_256_sha, tls_rsa_aes_256_sha are enabled.
default allowWeakCipher
"""
_header(topology_st,
'Test Case 3 - Check the ciphers availability for "+rsa_aes_128_sha,+rsa_aes_256_sha" with default allowWeakCipher')

topology_st.standalone.simple_bind_s(DN_DM, PASSWORD)
topology_st.standalone.modify_s(ENCRYPTION_DN,
[(ldap.MOD_REPLACE, 'nsSSL3Ciphers', b'+rsa_aes_128_sha,+rsa_aes_256_sha')])

log.info("\n######################### Restarting the server ######################\n")
topology_st.standalone.stop(timeout=10)
os.system('mv %s %s.48194_1' % (topology_st.standalone.errlog, topology_st.standalone.errlog))
os.system('touch %s' % (topology_st.standalone.errlog))
time.sleep(2)
topology_st.standalone.start(timeout=120)

connectWithOpenssl(topology_st, 'DES-CBC3-SHA', False)
connectWithOpenssl(topology_st, 'AES256-SHA256', False)
> connectWithOpenssl(topology_st, 'AES128-SHA', True)

/export/tests/tickets/ticket48194_test.py:184:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

topology_st = <lib389.topologies.TopologyMain object at 0x7fd1675089a0>
cipher = 'AES128-SHA', expect = True

def connectWithOpenssl(topology_st, cipher, expect):
"""
Connect with the given cipher
Condition:
If expect is True, the handshake should be successful.
If expect is False, the handshake should be refused with
access log: "Cannot communicate securely with peer:
no common encryption algorithm(s)."
"""
log.info("Testing %s -- expect to handshake %s", cipher, "successfully" if expect else "failed")

myurl = 'localhost:%s' % LDAPSPORT
cmdline = ['/usr/bin/openssl', 's_client', '-connect', myurl, '-cipher', cipher]

strcmdline = " ".join(cmdline)
log.info("Running cmdline: %s", strcmdline)

try:
proc = subprocess.Popen(cmdline, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT)
except ValueError:
log.info("%s failed: %s", cmdline, ValueError)
proc.kill()

while True:
l = proc.stdout.readline()
if l == b"":
break
if b'Cipher is' in l:
log.info("Found: %s", l)
if expect:
if b'(NONE)' in l:
> assert False
E assert False

/export/tests/tickets/ticket48194_test.py:108: AssertionError
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket48194_test.py:40 ############################################### INFO  lib389:ticket48194_test.py:41 ####### Test Case 3 - Check the ciphers availability for "+rsa_aes_128_sha,+rsa_aes_256_sha" with default allowWeakCipher INFO  lib389:ticket48194_test.py:42 ############################################### INFO  lib389.utils:ticket48194_test.py:175 ######################### Restarting the server ###################### INFO  lib389.utils:ticket48194_test.py:86 Testing DES-CBC3-SHA -- expect to handshake failed INFO  lib389.utils:ticket48194_test.py:92 Running cmdline: /usr/bin/openssl s_client -connect localhost:63601 -cipher DES-CBC3-SHA INFO  lib389.utils:ticket48194_test.py:105 Found: b'New, (NONE), Cipher is (NONE)\n' INFO  lib389.utils:ticket48194_test.py:86 Testing AES256-SHA256 -- expect to handshake failed INFO  lib389.utils:ticket48194_test.py:92 Running cmdline: /usr/bin/openssl s_client -connect localhost:63601 -cipher AES256-SHA256 INFO  lib389.utils:ticket48194_test.py:105 Found: b'New, (NONE), Cipher is (NONE)\n' INFO  lib389.utils:ticket48194_test.py:86 Testing AES128-SHA -- expect to handshake successfully INFO  lib389.utils:ticket48194_test.py:92 Running cmdline: /usr/bin/openssl s_client -connect localhost:63601 -cipher AES128-SHA INFO  lib389.utils:ticket48194_test.py:105 Found: b'New, (NONE), Cipher is (NONE)\n'
Failed tickets/ticket48194_test.py::test_run_4 6.98
topology_st = <lib389.topologies.TopologyMain object at 0x7fd1675089a0>

def test_run_4(topology_st):
"""
Check no nsSSL3Ciphers
Default ciphers are enabled.
default allowWeakCipher
"""
_header(topology_st, 'Test Case 5 - Check no nsSSL3Ciphers (-all) with default allowWeakCipher')

topology_st.standalone.simple_bind_s(DN_DM, PASSWORD)
topology_st.standalone.modify_s(ENCRYPTION_DN, [(ldap.MOD_DELETE, 'nsSSL3Ciphers', b'-all')])

log.info("\n######################### Restarting the server ######################\n")
topology_st.standalone.stop(timeout=10)
os.system('mv %s %s.48194_3' % (topology_st.standalone.errlog, topology_st.standalone.errlog))
os.system('touch %s' % (topology_st.standalone.errlog))
time.sleep(2)
topology_st.standalone.start(timeout=120)

> connectWithOpenssl(topology_st, 'DES-CBC3-SHA', False)

/export/tests/tickets/ticket48194_test.py:228:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

topology_st = <lib389.topologies.TopologyMain object at 0x7fd1675089a0>
cipher = 'DES-CBC3-SHA', expect = False

def connectWithOpenssl(topology_st, cipher, expect):
"""
Connect with the given cipher
Condition:
If expect is True, the handshake should be successful.
If expect is False, the handshake should be refused with
access log: "Cannot communicate securely with peer:
no common encryption algorithm(s)."
"""
log.info("Testing %s -- expect to handshake %s", cipher, "successfully" if expect else "failed")

myurl = 'localhost:%s' % LDAPSPORT
cmdline = ['/usr/bin/openssl', 's_client', '-connect', myurl, '-cipher', cipher]

strcmdline = " ".join(cmdline)
log.info("Running cmdline: %s", strcmdline)

try:
proc = subprocess.Popen(cmdline, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT)
except ValueError:
log.info("%s failed: %s", cmdline, ValueError)
proc.kill()

while True:
l = proc.stdout.readline()
if l == b"":
break
if b'Cipher is' in l:
log.info("Found: %s", l)
if expect:
if b'(NONE)' in l:
assert False
else:
proc.stdin.close()
assert True
else:
if b'(NONE)' in l:
assert True
else:
proc.stdin.close()
> assert False
E assert False

/export/tests/tickets/ticket48194_test.py:117: AssertionError
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket48194_test.py:40 ############################################### INFO  lib389:ticket48194_test.py:41 ####### Test Case 5 - Check no nsSSL3Ciphers (-all) with default allowWeakCipher INFO  lib389:ticket48194_test.py:42 ############################################### INFO  lib389.utils:ticket48194_test.py:221 ######################### Restarting the server ###################### INFO  lib389.utils:ticket48194_test.py:86 Testing DES-CBC3-SHA -- expect to handshake failed INFO  lib389.utils:ticket48194_test.py:92 Running cmdline: /usr/bin/openssl s_client -connect localhost:63601 -cipher DES-CBC3-SHA INFO  lib389.utils:ticket48194_test.py:105 Found: b'New, TLSv1.3, Cipher is TLS_AES_128_GCM_SHA256\n'
Failed tickets/ticket48194_test.py::test_run_5 6.92
topology_st = <lib389.topologies.TopologyMain object at 0x7fd1675089a0>

def test_run_5(topology_st):
"""
Check nsSSL3Ciphers: default
Default ciphers are enabled.
default allowWeakCipher
"""
_header(topology_st, 'Test Case 6 - Check default nsSSL3Ciphers (default setting) with default allowWeakCipher')

topology_st.standalone.simple_bind_s(DN_DM, PASSWORD)
topology_st.standalone.modify_s(ENCRYPTION_DN, [(ldap.MOD_REPLACE, 'nsSSL3Ciphers', b'default')])

log.info("\n######################### Restarting the server ######################\n")
topology_st.standalone.stop(timeout=10)
os.system('mv %s %s.48194_4' % (topology_st.standalone.errlog, topology_st.standalone.errlog))
os.system('touch %s' % (topology_st.standalone.errlog))
time.sleep(2)
topology_st.standalone.start(timeout=120)

> connectWithOpenssl(topology_st, 'DES-CBC3-SHA', False)

/export/tests/tickets/ticket48194_test.py:250:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

topology_st = <lib389.topologies.TopologyMain object at 0x7fd1675089a0>
cipher = 'DES-CBC3-SHA', expect = False

def connectWithOpenssl(topology_st, cipher, expect):
"""
Connect with the given cipher
Condition:
If expect is True, the handshake should be successful.
If expect is False, the handshake should be refused with
access log: "Cannot communicate securely with peer:
no common encryption algorithm(s)."
"""
log.info("Testing %s -- expect to handshake %s", cipher, "successfully" if expect else "failed")

myurl = 'localhost:%s' % LDAPSPORT
cmdline = ['/usr/bin/openssl', 's_client', '-connect', myurl, '-cipher', cipher]

strcmdline = " ".join(cmdline)
log.info("Running cmdline: %s", strcmdline)

try:
proc = subprocess.Popen(cmdline, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT)
except ValueError:
log.info("%s failed: %s", cmdline, ValueError)
proc.kill()

while True:
l = proc.stdout.readline()
if l == b"":
break
if b'Cipher is' in l:
log.info("Found: %s", l)
if expect:
if b'(NONE)' in l:
assert False
else:
proc.stdin.close()
assert True
else:
if b'(NONE)' in l:
assert True
else:
proc.stdin.close()
> assert False
E assert False

/export/tests/tickets/ticket48194_test.py:117: AssertionError
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket48194_test.py:40 ############################################### INFO  lib389:ticket48194_test.py:41 ####### Test Case 6 - Check default nsSSL3Ciphers (default setting) with default allowWeakCipher INFO  lib389:ticket48194_test.py:42 ############################################### INFO  lib389.utils:ticket48194_test.py:243 ######################### Restarting the server ###################### INFO  lib389.utils:ticket48194_test.py:86 Testing DES-CBC3-SHA -- expect to handshake failed INFO  lib389.utils:ticket48194_test.py:92 Running cmdline: /usr/bin/openssl s_client -connect localhost:63601 -cipher DES-CBC3-SHA INFO  lib389.utils:ticket48194_test.py:105 Found: b'New, TLSv1.3, Cipher is TLS_AES_128_GCM_SHA256\n'
Failed tickets/ticket48194_test.py::test_run_6 6.79
topology_st = <lib389.topologies.TopologyMain object at 0x7fd1675089a0>

def test_run_6(topology_st):
"""
Check nsSSL3Ciphers: +all,-TLS_RSA_WITH_AES_256_CBC_SHA256
All ciphers are disabled.
default allowWeakCipher
"""
_header(topology_st,
'Test Case 7 - Check nsSSL3Ciphers: +all,-TLS_RSA_WITH_AES_256_CBC_SHA256 with default allowWeakCipher')

topology_st.standalone.simple_bind_s(DN_DM, PASSWORD)
topology_st.standalone.modify_s(ENCRYPTION_DN,
[(ldap.MOD_REPLACE, 'nsSSL3Ciphers', b'+all,-TLS_RSA_WITH_AES_256_CBC_SHA256')])

log.info("\n######################### Restarting the server ######################\n")
topology_st.standalone.stop(timeout=10)
os.system('mv %s %s.48194_5' % (topology_st.standalone.errlog, topology_st.standalone.errlog))
os.system('touch %s' % (topology_st.standalone.errlog))
time.sleep(2)
topology_st.standalone.start(timeout=120)

> connectWithOpenssl(topology_st, 'DES-CBC3-SHA', False)

/export/tests/tickets/ticket48194_test.py:274:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

topology_st = <lib389.topologies.TopologyMain object at 0x7fd1675089a0>
cipher = 'DES-CBC3-SHA', expect = False

def connectWithOpenssl(topology_st, cipher, expect):
"""
Connect with the given cipher
Condition:
If expect is True, the handshake should be successful.
If expect is False, the handshake should be refused with
access log: "Cannot communicate securely with peer:
no common encryption algorithm(s)."
"""
log.info("Testing %s -- expect to handshake %s", cipher, "successfully" if expect else "failed")

myurl = 'localhost:%s' % LDAPSPORT
cmdline = ['/usr/bin/openssl', 's_client', '-connect', myurl, '-cipher', cipher]

strcmdline = " ".join(cmdline)
log.info("Running cmdline: %s", strcmdline)

try:
proc = subprocess.Popen(cmdline, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT)
except ValueError:
log.info("%s failed: %s", cmdline, ValueError)
proc.kill()

while True:
l = proc.stdout.readline()
if l == b"":
break
if b'Cipher is' in l:
log.info("Found: %s", l)
if expect:
if b'(NONE)' in l:
assert False
else:
proc.stdin.close()
assert True
else:
if b'(NONE)' in l:
assert True
else:
proc.stdin.close()
> assert False
E assert False

/export/tests/tickets/ticket48194_test.py:117: AssertionError
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket48194_test.py:40 ############################################### INFO  lib389:ticket48194_test.py:41 ####### Test Case 7 - Check nsSSL3Ciphers: +all,-TLS_RSA_WITH_AES_256_CBC_SHA256 with default allowWeakCipher INFO  lib389:ticket48194_test.py:42 ############################################### INFO  lib389.utils:ticket48194_test.py:267 ######################### Restarting the server ###################### INFO  lib389.utils:ticket48194_test.py:86 Testing DES-CBC3-SHA -- expect to handshake failed INFO  lib389.utils:ticket48194_test.py:92 Running cmdline: /usr/bin/openssl s_client -connect localhost:63601 -cipher DES-CBC3-SHA INFO  lib389.utils:ticket48194_test.py:105 Found: b'New, TLSv1.3, Cipher is TLS_AES_128_GCM_SHA256\n'
Failed tickets/ticket48194_test.py::test_run_8 6.77
topology_st = <lib389.topologies.TopologyMain object at 0x7fd1675089a0>

def test_run_8(topology_st):
"""
Check nsSSL3Ciphers: default + allowWeakCipher: off
Strong Default ciphers are enabled.
"""
_header(topology_st, 'Test Case 9 - Check default nsSSL3Ciphers (default setting + allowWeakCipher: off)')

topology_st.standalone.simple_bind_s(DN_DM, PASSWORD)
topology_st.standalone.modify_s(ENCRYPTION_DN, [(ldap.MOD_REPLACE, 'nsSSL3Ciphers', b'default'),
(ldap.MOD_REPLACE, 'allowWeakCipher', b'off')])

log.info("\n######################### Restarting the server ######################\n")
topology_st.standalone.stop(timeout=10)
os.system('mv %s %s.48194_7' % (topology_st.standalone.errlog, topology_st.standalone.errlog))
os.system('touch %s' % (topology_st.standalone.errlog))
time.sleep(2)
topology_st.standalone.start(timeout=120)

> connectWithOpenssl(topology_st, 'DES-CBC3-SHA', False)

/export/tests/tickets/ticket48194_test.py:297:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

topology_st = <lib389.topologies.TopologyMain object at 0x7fd1675089a0>
cipher = 'DES-CBC3-SHA', expect = False

def connectWithOpenssl(topology_st, cipher, expect):
"""
Connect with the given cipher
Condition:
If expect is True, the handshake should be successful.
If expect is False, the handshake should be refused with
access log: "Cannot communicate securely with peer:
no common encryption algorithm(s)."
"""
log.info("Testing %s -- expect to handshake %s", cipher, "successfully" if expect else "failed")

myurl = 'localhost:%s' % LDAPSPORT
cmdline = ['/usr/bin/openssl', 's_client', '-connect', myurl, '-cipher', cipher]

strcmdline = " ".join(cmdline)
log.info("Running cmdline: %s", strcmdline)

try:
proc = subprocess.Popen(cmdline, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT)
except ValueError:
log.info("%s failed: %s", cmdline, ValueError)
proc.kill()

while True:
l = proc.stdout.readline()
if l == b"":
break
if b'Cipher is' in l:
log.info("Found: %s", l)
if expect:
if b'(NONE)' in l:
assert False
else:
proc.stdin.close()
assert True
else:
if b'(NONE)' in l:
assert True
else:
proc.stdin.close()
> assert False
E assert False

/export/tests/tickets/ticket48194_test.py:117: AssertionError
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket48194_test.py:40 ############################################### INFO  lib389:ticket48194_test.py:41 ####### Test Case 9 - Check default nsSSL3Ciphers (default setting + allowWeakCipher: off) INFO  lib389:ticket48194_test.py:42 ############################################### INFO  lib389.utils:ticket48194_test.py:290 ######################### Restarting the server ###################### INFO  lib389.utils:ticket48194_test.py:86 Testing DES-CBC3-SHA -- expect to handshake failed INFO  lib389.utils:ticket48194_test.py:92 Running cmdline: /usr/bin/openssl s_client -connect localhost:63601 -cipher DES-CBC3-SHA INFO  lib389.utils:ticket48194_test.py:105 Found: b'New, TLSv1.3, Cipher is TLS_AES_128_GCM_SHA256\n'
Failed tickets/ticket48228_test.py::test_ticket48228_test_global_policy 1.34
topology_st = <lib389.topologies.TopologyMain object at 0x7fd166db37c0>
user = 'uid=user1,dc=example,dc=com', passwd = 'password', times = 6

def update_passwd(topology_st, user, passwd, times):
# Set the default value
cpw = passwd
for i in range(times):
log.info(" Bind as {%s,%s}" % (user, cpw))
topology_st.standalone.simple_bind_s(user, cpw)
# Now update the value for this iter.
cpw = 'password%d' % i
try:
> topology_st.standalone.modify_s(user, [(ldap.MOD_REPLACE, 'userpassword', cpw.encode())])

/export/tests/tickets/ticket48228_test.py:136:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

args = ('uid=user1,dc=example,dc=com', [(2, 'userpassword', b'password0')])
kwargs = {}
c_stack = [FrameInfo(frame=<frame at 0x7fd167206840, file '/usr/local/lib/python3.8/site-packages/lib389/__init__.py', line 180,...mbda>', code_context=[' self._inner_hookexec = lambda hook, methods, kwargs: hook.multicall(\n'], index=0), ...]
frame = FrameInfo(frame=<frame at 0x559a4e2b6da0, file '/export/tests/tickets/ticket48228_test.py', line 141, code update_pass...t=[" topology_st.standalone.modify_s(user, [(ldap.MOD_REPLACE, 'userpassword', cpw.encode())])\n"], index=0)

def inner(*args, **kwargs):
if name in [
'add_s',
'bind_s',
'delete_s',
'modify_s',
'modrdn_s',
'rename_s',
'sasl_interactive_bind_s',
'search_s',
'search_ext_s',
'simple_bind_s',
'unbind_s',
'getEntry',
] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'):
c_stack = inspect.stack()
frame = c_stack[1]

warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. "
"Found in: %s:%s" % (name, frame.filename, frame.lineno)))
# Later, we will add a sleep here to make it even more painful.
# Finally, it will raise an exception.
elif 'escapehatch' in kwargs:
kwargs.pop('escapehatch')

if name == 'result':
objtype, data = f(*args, **kwargs)
# data is either a 2-tuple or a list of 2-tuples
# print data
if data:
if isinstance(data, tuple):
return objtype, Entry(data)
elif isinstance(data, list):
# AD sends back these search references
# if objtype == ldap.RES_SEARCH_RESULT and \
# isinstance(data[-1],tuple) and \
# not data[-1][0]:
# print "Received search reference: "
# pprint.pprint(data[-1][1])
# data.pop() # remove the last non-entry element

return objtype, [Entry(x) for x in data]
else:
raise TypeError("unknown data type %s returned by result" %
type(data))
else:
return objtype, data
elif name.startswith('add'):
# the first arg is self
# the second and third arg are the dn and the data to send
# We need to convert the Entry into the format used by
# python-ldap
ent = args[0]
if isinstance(ent, Entry):
return f(ent.dn, ent.toTupleList(), *args[2:])
else:
return f(*args, **kwargs)
else:
> return f(*args, **kwargs)

/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd166db3790>
dn = 'uid=user1,dc=example,dc=com'
modlist = [(2, 'userpassword', b'password0')]

def modify_s(self,dn,modlist):
> return self.modify_ext_s(dn,modlist,None,None)

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:640:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

args = ('uid=user1,dc=example,dc=com', [(2, 'userpassword', b'password0')], None, None)
kwargs = {}

def inner(*args, **kwargs):
if name in [
'add_s',
'bind_s',
'delete_s',
'modify_s',
'modrdn_s',
'rename_s',
'sasl_interactive_bind_s',
'search_s',
'search_ext_s',
'simple_bind_s',
'unbind_s',
'getEntry',
] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'):
c_stack = inspect.stack()
frame = c_stack[1]

warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. "
"Found in: %s:%s" % (name, frame.filename, frame.lineno)))
# Later, we will add a sleep here to make it even more painful.
# Finally, it will raise an exception.
elif 'escapehatch' in kwargs:
kwargs.pop('escapehatch')

if name == 'result':
objtype, data = f(*args, **kwargs)
# data is either a 2-tuple or a list of 2-tuples
# print data
if data:
if isinstance(data, tuple):
return objtype, Entry(data)
elif isinstance(data, list):
# AD sends back these search references
# if objtype == ldap.RES_SEARCH_RESULT and \
# isinstance(data[-1],tuple) and \
# not data[-1][0]:
# print "Received search reference: "
# pprint.pprint(data[-1][1])
# data.pop() # remove the last non-entry element

return objtype, [Entry(x) for x in data]
else:
raise TypeError("unknown data type %s returned by result" %
type(data))
else:
return objtype, data
elif name.startswith('add'):
# the first arg is self
# the second and third arg are the dn and the data to send
# We need to convert the Entry into the format used by
# python-ldap
ent = args[0]
if isinstance(ent, Entry):
return f(ent.dn, ent.toTupleList(), *args[2:])
else:
return f(*args, **kwargs)
else:
> return f(*args, **kwargs)

/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd166db3790>
dn = 'uid=user1,dc=example,dc=com'
modlist = [(2, 'userpassword', b'password0')], serverctrls = None
clientctrls = None

def modify_ext_s(self,dn,modlist,serverctrls=None,clientctrls=None):
msgid = self.modify_ext(dn,modlist,serverctrls,clientctrls)
> resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout)

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:613:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

args = (10,), kwargs = {'all': 1, 'timeout': -1}

def inner(*args, **kwargs):
if name in [
'add_s',
'bind_s',
'delete_s',
'modify_s',
'modrdn_s',
'rename_s',
'sasl_interactive_bind_s',
'search_s',
'search_ext_s',
'simple_bind_s',
'unbind_s',
'getEntry',
] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'):
c_stack = inspect.stack()
frame = c_stack[1]

warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. "
"Found in: %s:%s" % (name, frame.filename, frame.lineno)))
# Later, we will add a sleep here to make it even more painful.
# Finally, it will raise an exception.
elif 'escapehatch' in kwargs:
kwargs.pop('escapehatch')

if name == 'result':
objtype, data = f(*args, **kwargs)
# data is either a 2-tuple or a list of 2-tuples
# print data
if data:
if isinstance(data, tuple):
return objtype, Entry(data)
elif isinstance(data, list):
# AD sends back these search references
# if objtype == ldap.RES_SEARCH_RESULT and \
# isinstance(data[-1],tuple) and \
# not data[-1][0]:
# print "Received search reference: "
# pprint.pprint(data[-1][1])
# data.pop() # remove the last non-entry element

return objtype, [Entry(x) for x in data]
else:
raise TypeError("unknown data type %s returned by result" %
type(data))
else:
return objtype, data
elif name.startswith('add'):
# the first arg is self
# the second and third arg are the dn and the data to send
# We need to convert the Entry into the format used by
# python-ldap
ent = args[0]
if isinstance(ent, Entry):
return f(ent.dn, ent.toTupleList(), *args[2:])
else:
return f(*args, **kwargs)
else:
> return f(*args, **kwargs)

/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd166db3790>, msgid = 10, all = 1
timeout = -1, resp_ctrl_classes = None

def result3(self,msgid=ldap.RES_ANY,all=1,timeout=None,resp_ctrl_classes=None):
> resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4(
msgid,all,timeout,
add_ctrls=0,add_intermediates=0,add_extop=0,
resp_ctrl_classes=resp_ctrl_classes
)

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:764:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

args = (10, 1, -1)
kwargs = {'add_ctrls': 0, 'add_extop': 0, 'add_intermediates': 0, 'resp_ctrl_classes': None}

def inner(*args, **kwargs):
if name in [
'add_s',
'bind_s',
'delete_s',
'modify_s',
'modrdn_s',
'rename_s',
'sasl_interactive_bind_s',
'search_s',
'search_ext_s',
'simple_bind_s',
'unbind_s',
'getEntry',
] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'):
c_stack = inspect.stack()
frame = c_stack[1]

warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. "
"Found in: %s:%s" % (name, frame.filename, frame.lineno)))
# Later, we will add a sleep here to make it even more painful.
# Finally, it will raise an exception.
elif 'escapehatch' in kwargs:
kwargs.pop('escapehatch')

if name == 'result':
objtype, data = f(*args, **kwargs)
# data is either a 2-tuple or a list of 2-tuples
# print data
if data:
if isinstance(data, tuple):
return objtype, Entry(data)
elif isinstance(data, list):
# AD sends back these search references
# if objtype == ldap.RES_SEARCH_RESULT and \
# isinstance(data[-1],tuple) and \
# not data[-1][0]:
# print "Received search reference: "
# pprint.pprint(data[-1][1])
# data.pop() # remove the last non-entry element

return objtype, [Entry(x) for x in data]
else:
raise TypeError("unknown data type %s returned by result" %
type(data))
else:
return objtype, data
elif name.startswith('add'):
# the first arg is self
# the second and third arg are the dn and the data to send
# We need to convert the Entry into the format used by
# python-ldap
ent = args[0]
if isinstance(ent, Entry):
return f(ent.dn, ent.toTupleList(), *args[2:])
else:
return f(*args, **kwargs)
else:
> return f(*args, **kwargs)

/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd166db3790>, msgid = 10, all = 1
timeout = -1, add_ctrls = 0, add_intermediates = 0, add_extop = 0
resp_ctrl_classes = None

def result4(self,msgid=ldap.RES_ANY,all=1,timeout=None,add_ctrls=0,add_intermediates=0,add_extop=0,resp_ctrl_classes=None):
if timeout is None:
timeout = self.timeout
> ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop)

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:774:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

args = (<built-in method result4 of LDAP object at 0x7fd166c78a50>, 10, 1, -1, 0, 0, ...)
kwargs = {}

def inner(*args, **kwargs):
if name in [
'add_s',
'bind_s',
'delete_s',
'modify_s',
'modrdn_s',
'rename_s',
'sasl_interactive_bind_s',
'search_s',
'search_ext_s',
'simple_bind_s',
'unbind_s',
'getEntry',
] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'):
c_stack = inspect.stack()
frame = c_stack[1]

warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. "
"Found in: %s:%s" % (name, frame.filename, frame.lineno)))
# Later, we will add a sleep here to make it even more painful.
# Finally, it will raise an exception.
elif 'escapehatch' in kwargs:
kwargs.pop('escapehatch')

if name == 'result':
objtype, data = f(*args, **kwargs)
# data is either a 2-tuple or a list of 2-tuples
# print data
if data:
if isinstance(data, tuple):
return objtype, Entry(data)
elif isinstance(data, list):
# AD sends back these search references
# if objtype == ldap.RES_SEARCH_RESULT and \
# isinstance(data[-1],tuple) and \
# not data[-1][0]:
# print "Received search reference: "
# pprint.pprint(data[-1][1])
# data.pop() # remove the last non-entry element

return objtype, [Entry(x) for x in data]
else:
raise TypeError("unknown data type %s returned by result" %
type(data))
else:
return objtype, data
elif name.startswith('add'):
# the first arg is self
# the second and third arg are the dn and the data to send
# We need to convert the Entry into the format used by
# python-ldap
ent = args[0]
if isinstance(ent, Entry):
return f(ent.dn, ent.toTupleList(), *args[2:])
else:
return f(*args, **kwargs)
else:
> return f(*args, **kwargs)

/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd166db3790>
func = <built-in method result4 of LDAP object at 0x7fd166c78a50>
args = (10, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None
exc_type = None, exc_value = None, exc_traceback = None

def _ldap_call(self,func,*args,**kwargs):
"""
Wrapper method mainly for serializing calls into OpenLDAP libs
and trace logs
"""
self._ldap_object_lock.acquire()
if __debug__:
if self._trace_level>=1:
self._trace_file.write('*** %s %s - %s\n%s\n' % (
repr(self),
self._uri,
'.'.join((self.__class__.__name__,func.__name__)),
pprint.pformat((args,kwargs))
))
if self._trace_level>=9:
traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file)
diagnostic_message_success = None
try:
try:
result = func(*args,**kwargs)
if __debug__ and self._trace_level>=2:
if func.__name__!="unbind_ext":
diagnostic_message_success = self._l.get_option(ldap.OPT_DIAGNOSTIC_MESSAGE)
finally:
self._ldap_object_lock.release()
except LDAPError as e:
exc_type,exc_value,exc_traceback = sys.exc_info()
try:
if 'info' not in e.args[0] and 'errno' in e.args[0]:
e.args[0]['info'] = strerror(e.args[0]['errno'])
except IndexError:
pass
if __debug__ and self._trace_level>=2:
self._trace_file.write('=> LDAPError - %s: %s\n' % (e.__class__.__name__,str(e)))
try:
> reraise(exc_type, exc_value, exc_traceback)

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:340:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

exc_type = <class 'ldap.INSUFFICIENT_ACCESS'>
exc_value = INSUFFICIENT_ACCESS({'msgtype': 103, 'msgid': 10, 'result': 50, 'desc': 'Insufficient access', 'ctrls': [], 'info': "Insufficient 'write' privilege to the 'userPassword' attribute of entry 'uid=user1,dc=example,dc=com'.\n"})
exc_traceback = <traceback object at 0x7fd166763bc0>

def reraise(exc_type, exc_value, exc_traceback):
"""Re-raise an exception given information from sys.exc_info()

Note that unlike six.reraise, this does not support replacing the
traceback. All arguments must come from a single sys.exc_info() call.
"""
# In Python 3, all exception info is contained in one object.
> raise exc_value

/usr/local/lib64/python3.8/site-packages/ldap/compat.py:46:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd166db3790>
func = <built-in method result4 of LDAP object at 0x7fd166c78a50>
args = (10, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None
exc_type = None, exc_value = None, exc_traceback = None

def _ldap_call(self,func,*args,**kwargs):
"""
Wrapper method mainly for serializing calls into OpenLDAP libs
and trace logs
"""
self._ldap_object_lock.acquire()
if __debug__:
if self._trace_level>=1:
self._trace_file.write('*** %s %s - %s\n%s\n' % (
repr(self),
self._uri,
'.'.join((self.__class__.__name__,func.__name__)),
pprint.pformat((args,kwargs))
))
if self._trace_level>=9:
traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file)
diagnostic_message_success = None
try:
try:
> result = func(*args,**kwargs)
E ldap.INSUFFICIENT_ACCESS: {'msgtype': 103, 'msgid': 10, 'result': 50, 'desc': 'Insufficient access', 'ctrls': [], 'info': "Insufficient 'write' privilege to the 'userPassword' attribute of entry 'uid=user1,dc=example,dc=com'.\n"}

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:324: INSUFFICIENT_ACCESS

During handling of the above exception, another exception occurred:

topology_st = <lib389.topologies.TopologyMain object at 0x7fd166db37c0>

def test_ticket48228_test_global_policy(topology_st):
"""
Check global password policy
"""
log.info(' Set inhistory = 6')
set_global_pwpolicy(topology_st, 6)

log.info(' Bind as directory manager')
log.info("Bind as %s" % DN_DM)
topology_st.standalone.simple_bind_s(DN_DM, PASSWORD)

log.info(' Add an entry' + USER1_DN)
try:
topology_st.standalone.add_s(
Entry((USER1_DN, {'objectclass': "top person organizationalPerson inetOrgPerson".split(),
'sn': '1',
'cn': 'user 1',
'uid': 'user1',
'givenname': 'user',
'mail': 'user1@example.com',
'userpassword': 'password'})))
except ldap.LDAPError as e:
log.fatal('test_ticket48228: Failed to add user' + USER1_DN + ': error ' + e.message['desc'])
assert False

log.info(' Update the password of ' + USER1_DN + ' 6 times')
> update_passwd(topology_st, USER1_DN, 'password', 6)

/export/tests/tickets/ticket48228_test.py:174:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

topology_st = <lib389.topologies.TopologyMain object at 0x7fd166db37c0>
user = 'uid=user1,dc=example,dc=com', passwd = 'password', times = 6

def update_passwd(topology_st, user, passwd, times):
# Set the default value
cpw = passwd
for i in range(times):
log.info(" Bind as {%s,%s}" % (user, cpw))
topology_st.standalone.simple_bind_s(user, cpw)
# Now update the value for this iter.
cpw = 'password%d' % i
try:
topology_st.standalone.modify_s(user, [(ldap.MOD_REPLACE, 'userpassword', cpw.encode())])
except ldap.LDAPError as e:
log.fatal(
> 'test_ticket48228: Failed to update the password ' + cpw + ' of user ' + user + ': error ' + e.message[
'desc'])
E AttributeError: 'INSUFFICIENT_ACCESS' object has no attribute 'message'

/export/tests/tickets/ticket48228_test.py:139: AttributeError
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Failed tickets/ticket48234_test.py::test_ticket48234 0.34
topology_st = <lib389.topologies.TopologyMain object at 0x7fd166c4b640>

def test_ticket48234(topology_st):
"""
Test aci which contains an extensible filter.
shutdown
"""

log.info('Bind as root DN')
try:
topology_st.standalone.simple_bind_s(DN_DM, PASSWORD)
except ldap.LDAPError as e:
topology_st.standalone.log.error('Root DN failed to authenticate: ' + e.args[0]['desc'])
assert False

ouname = 'outest'
username = 'admin'
passwd = 'Password'
deniedattr = 'telephonenumber'
log.info('Add aci which contains extensible filter.')
aci_text = ('(targetattr = "%s")' % (deniedattr) +
'(target = "ldap:///%s")' % (DEFAULT_SUFFIX) +
'(version 3.0;acl "admin-tel-matching-rule-outest";deny (all)' +
'(userdn = "ldap:///%s??sub?(&(cn=%s)(ou:dn:=%s))");)' % (DEFAULT_SUFFIX, username, ouname))

try:
topology_st.standalone.modify_s(DEFAULT_SUFFIX, [(ldap.MOD_ADD, 'aci', ensure_bytes(aci_text))])
except ldap.LDAPError as e:
log.error('Failed to add aci: (%s) error %s' % (aci_text, e.args[0]['desc']))
assert False

log.info('Add entries ...')
for idx in range(0, 2):
ou0 = 'OU%d' % idx
log.info('adding %s under %s...' % (ou0, DEFAULT_SUFFIX))
add_ou_entry(topology_st.standalone, ou0, DEFAULT_SUFFIX)
parent = 'ou=%s,%s' % (ou0, DEFAULT_SUFFIX)
log.info('adding %s under %s...' % (ouname, parent))
add_ou_entry(topology_st.standalone, ouname, parent)

for idx in range(0, 2):
parent = 'ou=%s,ou=OU%d,%s' % (ouname, idx, DEFAULT_SUFFIX)
log.info('adding %s under %s...' % (username, parent))
add_user_entry(topology_st.standalone, username, passwd, parent)

binddn = 'cn=%s,%s' % (username, parent)
log.info('Bind as user %s' % binddn)
try:
topology_st.standalone.simple_bind_s(binddn, passwd)
except ldap.LDAPError as e:
topology_st.standalone.log.error(bindn + ' failed to authenticate: ' + e.args[0]['desc'])
assert False

filter = '(cn=%s)' % username
try:
entries = topology_st.standalone.search_s(DEFAULT_SUFFIX, ldap.SCOPE_SUBTREE, filter, [deniedattr, 'dn'])
> assert 2 == len(entries)
E assert 2 == 0
E +2
E -0

/export/tests/tickets/ticket48234_test.py:83: AssertionError
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket48234_test:ticket48234_test.py:35 Bind as root DN INFO  tests.tickets.ticket48234_test:ticket48234_test.py:46 Add aci which contains extensible filter. INFO  tests.tickets.ticket48234_test:ticket48234_test.py:58 Add entries ... INFO  tests.tickets.ticket48234_test:ticket48234_test.py:61 adding OU0 under dc=example,dc=com... INFO  tests.tickets.ticket48234_test:ticket48234_test.py:64 adding outest under ou=OU0,dc=example,dc=com... INFO  tests.tickets.ticket48234_test:ticket48234_test.py:61 adding OU1 under dc=example,dc=com... INFO  tests.tickets.ticket48234_test:ticket48234_test.py:64 adding outest under ou=OU1,dc=example,dc=com... INFO  tests.tickets.ticket48234_test:ticket48234_test.py:69 adding admin under ou=outest,ou=OU0,dc=example,dc=com... INFO  tests.tickets.ticket48234_test:ticket48234_test.py:69 adding admin under ou=outest,ou=OU1,dc=example,dc=com... INFO  tests.tickets.ticket48234_test:ticket48234_test.py:73 Bind as user cn=admin,ou=outest,ou=OU1,dc=example,dc=com
Failed tickets/ticket48266_test.py::test_ticket48266_count_csn_evaluation 0.38
topology_m2 = <lib389.topologies.TopologyMain object at 0x7fd1668a14f0>
entries = None

def test_ticket48266_count_csn_evaluation(topology_m2, entries):
ents = topology_m2.ms["master1"].agreement.list(suffix=SUFFIX)
assert len(ents) == 1
> first_csn = _get_first_not_replicated_csn(topology_m2)

/export/tests/tickets/ticket48266_test.py:176:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

topology_m2 = <lib389.topologies.TopologyMain object at 0x7fd1668a14f0>

def _get_first_not_replicated_csn(topology_m2):
name = "cn=%s2,%s" % (NEW_ACCOUNT, SUFFIX)

# read the first CSN that will not be replicated
mod = [(ldap.MOD_REPLACE, 'telephonenumber', ensure_bytes('123456'))]
topology_m2.ms["master1"].modify_s(name, mod)
msgid = topology_m2.ms["master1"].search_ext(name, ldap.SCOPE_SUBTREE, 'objectclass=*', ['nscpentrywsi'])
rtype, rdata, rmsgid = topology_m2.ms["master1"].result2(msgid)
attrs = None
for dn, raw_attrs in rdata:
topology_m2.ms["master1"].log.info("dn: %s" % dn)
if 'nscpentrywsi' in raw_attrs:
attrs = raw_attrs['nscpentrywsi']
assert attrs
for attr in attrs:
if ensure_str(attr.lower()).startswith('telephonenumber'):
break
assert attr

log.info("############# %s " % name)
# now retrieve the CSN of the operation we are looking for
csn = None
found_ops = topology_m2.ms['master1'].ds_access_log.match(".*MOD dn=\"%s\".*" % name)
assert(len(found_ops) > 0)
found_op = topology_m2.ms['master1'].ds_access_log.parse_line(found_ops[-1])
log.info(found_op)

# Now look for the related CSN
found_csns = topology_m2.ms['master1'].ds_access_log.match(".*conn=%s op=%s RESULT.*" % (found_op['conn'], found_op['op']))
assert(len(found_csns) > 0)
found_csn = topology_m2.ms['master1'].ds_access_log.parse_line(found_csns[-1])
log.info(found_csn)
> return found_csn['csn']
E KeyError: 'csn'

/export/tests/tickets/ticket48266_test.py:147: KeyError
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket48266_test.py:125 dn: cn=new_account2,dc=example,dc=com INFO  tests.tickets.ticket48266_test:ticket48266_test.py:134 ############# cn=new_account2,dc=example,dc=com INFO  tests.tickets.ticket48266_test:ticket48266_test.py:140 {'action': 'MOD', 'timestamp': '[29/Oct/2020:00:00:44.210783641 -0400]', 'conn': '1', 'op': '12', 'rem': 'dn="cn=new_account2,dc=example,dc=com"', 'datetime': datetime.datetime(2020, 9, 29, 0, 0, 0, 210783, tzinfo=tzoffset(None, -14400))} INFO  tests.tickets.ticket48266_test:ticket48266_test.py:146 {'action': 'RESULT', 'timestamp': '[29/Oct/2020:00:00:44.279365592 -0400]', 'conn': '1', 'op': '12', 'rem': 'err=0 tag=103 nentries=0 wtime=0.000133414 optime=0.068594673 etime=0.068721135 csn=5f9a3e6c000000010000', 'datetime': datetime.datetime(2020, 9, 29, 0, 0, 0, 279365, tzinfo=tzoffset(None, -14400))}
Failed tickets/ticket48325_test.py::test_ticket48325 0.01
topology_m1h1c1 = <lib389.topologies.TopologyMain object at 0x7fd1669b6130>

def test_ticket48325(topology_m1h1c1):
"""
Test that the RUV element order is correctly maintained when promoting
a hub or consumer.
"""

#
# Promote consumer to master
#
C1 = topology_m1h1c1.cs["consumer1"]
M1 = topology_m1h1c1.ms["master1"]
H1 = topology_m1h1c1.hs["hub1"]
repl = ReplicationManager(DEFAULT_SUFFIX)
> repl._ensure_changelog(C1)

/export/tests/tickets/ticket48325_test.py:53:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/usr/local/lib/python3.8/site-packages/lib389/replica.py:1928: in _ensure_changelog
cl.create(properties={
/usr/local/lib/python3.8/site-packages/lib389/_mapped_object.py:971: in create
return self._create(rdn, properties, basedn, ensure=False)
/usr/local/lib/python3.8/site-packages/lib389/_mapped_object.py:946: in _create
self._instance.add_ext_s(e, serverctrls=self._server_controls, clientctrls=self._client_controls, escapehatch='i am sure')
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:176: in inner
return f(ent.dn, ent.toTupleList(), *args[2:])
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:425: in add_ext_s
resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout)
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:764: in result3
resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4(
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:774: in result4
ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop)
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:340: in _ldap_call
reraise(exc_type, exc_value, exc_traceback)
/usr/local/lib64/python3.8/site-packages/ldap/compat.py:46: in reraise
raise exc_value
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd166ab1b20>
func = <built-in method result4 of LDAP object at 0x7fd166998f60>
args = (15, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None
exc_type = None, exc_value = None, exc_traceback = None

def _ldap_call(self,func,*args,**kwargs):
"""
Wrapper method mainly for serializing calls into OpenLDAP libs
and trace logs
"""
self._ldap_object_lock.acquire()
if __debug__:
if self._trace_level>=1:
self._trace_file.write('*** %s %s - %s\n%s\n' % (
repr(self),
self._uri,
'.'.join((self.__class__.__name__,func.__name__)),
pprint.pformat((args,kwargs))
))
if self._trace_level>=9:
traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file)
diagnostic_message_success = None
try:
try:
> result = func(*args,**kwargs)
E ldap.UNWILLING_TO_PERFORM: {'msgtype': 105, 'msgid': 15, 'result': 53, 'desc': 'Server is unwilling to perform', 'ctrls': [], 'info': 'Changelog configuration is part of the backend configuration'}

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:324: UNWILLING_TO_PERFORM
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for hub1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39101, 'ldap-secureport': 63801, 'server-id': 'hub1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for consumer1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39201, 'ldap-secureport': 63901, 'server-id': 'consumer1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:524 Creating replication topology. INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39101 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39101 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39101 is NOT working (expect 6072bd44-59e9-4033-996d-437a1fc4c9bf / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39101 is working INFO  lib389.replica:replica.py:2211 SUCCESS: joined consumer from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to 
ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39101 INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39101 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is was created INFO  lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39101 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 218d8953-f3b7-4c9f-9ee7-cf81e4a8f1ff / got description=6072bd44-59e9-4033-996d-437a1fc4c9bf) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is working
Failed tickets/ticket48342_test.py::test_ticket4026 90.93
topology_m3 = <lib389.topologies.TopologyMain object at 0x7fd16676c790>

def test_ticket4026(topology_m3):
"""Write your replication testcase here.

To access each DirSrv instance use: topology_m3.ms["master1"], topology_m3.ms["master2"],
..., topology_m3.hub1, ..., topology_m3.consumer1, ...

Also, if you need any testcase initialization,
please, write additional fixture for that(include finalizer).
"""

try:
topology_m3.ms["master1"].add_s(Entry((PEOPLE_DN, {
'objectclass': "top extensibleObject".split(),
'ou': 'people'})))
except ldap.ALREADY_EXISTS:
pass

topology_m3.ms["master1"].add_s(Entry(('ou=ranges,' + SUFFIX, {
'objectclass': 'top organizationalunit'.split(),
'ou': 'ranges'
})))
for cpt in range(MAX_ACCOUNTS):
name = "user%d" % (cpt)
topology_m3.ms["master1"].add_s(Entry(("uid=%s,%s" % (name, PEOPLE_DN), {
'objectclass': 'top posixAccount extensibleObject'.split(),
'uid': name,
'cn': name,
'uidNumber': '1',
'gidNumber': '1',
'homeDirectory': '/home/%s' % name
})))

# make master3 having more free slots that master2
# so master1 will contact master3
_dna_config(topology_m3.ms["master1"], nextValue=100, maxValue=10)
_dna_config(topology_m3.ms["master2"], nextValue=200, maxValue=10)
_dna_config(topology_m3.ms["master3"], nextValue=300, maxValue=3000)

# Turn on lots of error logging now.

mod = [(ldap.MOD_REPLACE, 'nsslapd-errorlog-level', b'16384')]
# mod = [(ldap.MOD_REPLACE, 'nsslapd-errorlog-level', '1')]
topology_m3.ms["master1"].modify_s('cn=config', mod)
topology_m3.ms["master2"].modify_s('cn=config', mod)
topology_m3.ms["master3"].modify_s('cn=config', mod)

# We need to wait for the event in dna.c to fire to start the servers
# see dna.c line 899
time.sleep(60)

# add on master1 users with description DNA
for cpt in range(10):
name = "user_with_desc1_%d" % (cpt)
topology_m3.ms["master1"].add_s(Entry(("uid=%s,%s" % (name, PEOPLE_DN), {
'objectclass': 'top posixAccount extensibleObject'.split(),
'uid': name,
'cn': name,
'description': '-1',
'uidNumber': '1',
'gidNumber': '1',
'homeDirectory': '/home/%s' % name
})))
# give time to negociate master1 <--> master3
time.sleep(10)
# add on master1 users with description DNA
for cpt in range(11, 20):
name = "user_with_desc1_%d" % (cpt)
> topology_m3.ms["master1"].add_s(Entry(("uid=%s,%s" % (name, PEOPLE_DN), {
'objectclass': 'top posixAccount extensibleObject'.split(),
'uid': name,
'cn': name,
'description': '-1',
'uidNumber': '1',
'gidNumber': '1',
'homeDirectory': '/home/%s' % name
})))

/export/tests/tickets/ticket48342_test.py:118:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:176: in inner
return f(ent.dn, ent.toTupleList(), *args[2:])
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:439: in add_s
return self.add_ext_s(dn,modlist,None,None)
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:178: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:425: in add_ext_s
resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout)
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:764: in result3
resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4(
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:774: in result4
ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop)
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:340: in _ldap_call
reraise(exc_type, exc_value, exc_traceback)
/usr/local/lib64/python3.8/site-packages/ldap/compat.py:46: in reraise
raise exc_value
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd16676c7f0>
func = <built-in method result4 of LDAP object at 0x7fd166c68240>
args = (15, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None
exc_type = None, exc_value = None, exc_traceback = None

def _ldap_call(self,func,*args,**kwargs):
"""
Wrapper method mainly for serializing calls into OpenLDAP libs
and trace logs
"""
self._ldap_object_lock.acquire()
if __debug__:
if self._trace_level>=1:
self._trace_file.write('*** %s %s - %s\n%s\n' % (
repr(self),
self._uri,
'.'.join((self.__class__.__name__,func.__name__)),
pprint.pformat((args,kwargs))
))
if self._trace_level>=9:
traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file)
diagnostic_message_success = None
try:
try:
> result = func(*args,**kwargs)
E ldap.OPERATIONS_ERROR: {'msgtype': 105, 'msgid': 15, 'result': 1, 'desc': 'Operations error', 'ctrls': [], 'info': 'Allocation of a new value for range cn=dna config,cn=distributed numeric assignment plugin,cn=plugins,cn=config failed! Unable to proceed.'}

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:324: OPERATIONS_ERROR
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master3 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39003, 'ldap-secureport': 63703, 'server-id': 'master3', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... 
INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect ddedca47-9cb2-4d0d-a4c7-9a6bb8725250 / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 8910b013-1329-43dc-87b5-eb9038d944c9 / got description=ddedca47-9cb2-4d0d-a4c7-9a6bb8725250) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:156 Joining master master3 to master1 ... 
INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 46e2f41e-2bc0-4a16-91b5-331589937ff5 / got description=8910b013-1329-43dc-87b5-eb9038d944c9) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 5ce82e3c-4edc-4b59-a0ea-5e34ef43d9a6 / got description=46e2f41e-2bc0-4a16-91b5-331589937ff5) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... 
INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master3 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master3 ... INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is was created INFO  lib389.topologies:topologies.py:164 Ensuring master master3 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master3 to master2 ... INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket48342_test:ticket48342_test.py:19 Add dna plugin config entry...ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 INFO  tests.tickets.ticket48342_test:ticket48342_test.py:37 Enable the DNA plugin... INFO  tests.tickets.ticket48342_test:ticket48342_test.py:44 Restarting the server... INFO  tests.tickets.ticket48342_test:ticket48342_test.py:19 Add dna plugin config entry...ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  tests.tickets.ticket48342_test:ticket48342_test.py:37 Enable the DNA plugin... INFO  tests.tickets.ticket48342_test:ticket48342_test.py:44 Restarting the server... INFO  tests.tickets.ticket48342_test:ticket48342_test.py:19 Add dna plugin config entry...ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 INFO  tests.tickets.ticket48342_test:ticket48342_test.py:37 Enable the DNA plugin... INFO  tests.tickets.ticket48342_test:ticket48342_test.py:44 Restarting the server...
Failed tickets/ticket48637_test.py::test_ticket48637 4.47
topology_st = <lib389.topologies.TopologyMain object at 0x7fd166bbb610>

def test_ticket48637(topology_st):
"""Test for entry cache corruption

This requires automember and managed entry plugins to be configured.

Then remove the group that automember would use to trigger a failure when
adding a new entry. Automember fails, and then managed entry also fails.

Make sure a base search on the entry returns error 32
"""

if DEBUGGING:
# Add debugging steps(if any)...
pass

#
# Add our setup entries
#
try:
topology_st.standalone.add_s(Entry((PEOPLE_OU, {
'objectclass': 'top organizationalunit'.split(),
'ou': 'people'})))
except ldap.ALREADY_EXISTS:
pass
except ldap.LDAPError as e:
log.fatal('Failed to add people ou: ' + str(e))
assert False

try:
topology_st.standalone.add_s(Entry((GROUP_OU, {
'objectclass': 'top organizationalunit'.split(),
'ou': 'groups'})))
except ldap.ALREADY_EXISTS:
pass
except ldap.LDAPError as e:
log.fatal('Failed to add groups ou: ' + str(e))
assert False

try:
topology_st.standalone.add_s(Entry((MEP_OU, {
'objectclass': 'top extensibleObject'.split(),
'ou': 'mep'})))
except ldap.LDAPError as e:
log.fatal('Failed to add MEP ou: ' + str(e))
assert False

try:
topology_st.standalone.add_s(Entry((MEP_TEMPLATE, {
'objectclass': 'top mepTemplateEntry'.split(),
'cn': 'mep template',
'mepRDNAttr': 'cn',
'mepStaticAttr': 'objectclass: groupofuniquenames',
'mepMappedAttr': 'cn: $uid'})))
except ldap.LDAPError as e:
log.fatal('Failed to add MEP ou: ' + str(e))
assert False

#
# Configure automember
#
try:
topology_st.standalone.add_s(Entry((AUTO_DN, {
'cn': 'All Users',
'objectclass': ['top', 'autoMemberDefinition'],
'autoMemberScope': 'dc=example,dc=com',
'autoMemberFilter': 'objectclass=person',
'autoMemberDefaultGroup': GROUP_DN,
'autoMemberGroupingAttr': 'uniquemember:dn'})))
except ldap.LDAPError as e:
log.fatal('Failed to configure automember plugin : ' + str(e))
assert False

#
# Configure managed entry plugin
#
try:
topology_st.standalone.add_s(Entry((MEP_DN, {
'cn': 'MEP Definition',
'objectclass': ['top', 'extensibleObject'],
'originScope': 'ou=people,dc=example,dc=com',
'originFilter': 'objectclass=person',
'managedBase': 'ou=groups,dc=example,dc=com',
'managedTemplate': MEP_TEMPLATE})))
except ldap.LDAPError as e:
log.fatal('Failed to configure managed entry plugin : ' + str(e))
assert False

#
# Restart DS
#
topology_st.standalone.restart(timeout=30)

#
# Add entry that should fail since the automember group does not exist
#
try:
topology_st.standalone.add_s(Entry((USER_DN, {
'uid': 'test',
'objectclass': ['top', 'person', 'extensibleObject'],
'sn': 'test',
'cn': 'test'})))
except ldap.LDAPError as e:
pass

#
# Search for the entry - it should not be returned
#
try:
entry = topology_st.standalone.search_s(USER_DN, ldap.SCOPE_SUBTREE,
'objectclass=*')
if entry:
log.fatal('Entry was incorrectly returned')
> assert False
E assert False

/export/tests/tickets/ticket48637_test.py:139: AssertionError
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
CRITICAL tests.tickets.ticket48637_test:ticket48637_test.py:138 Entry was incorrectly returned
Failed tickets/ticket48784_test.py::test_ticket48784 31.00
Fixture "add_entry" called directly. Fixtures are not meant to be called directly,
but are created automatically when test functions request them as parameters.
See https://docs.pytest.org/en/latest/fixture.html for more information about fixtures, and
https://docs.pytest.org/en/latest/deprecations.html#calling-fixtures-directly about how to update your code.
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 547b5a22-a7f6-42a0-9b56-53f89b7641bb / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working 
(expect 9fc9dba9-45c1-4829-a36f-0eac201ecdf9 / got description=547b5a22-a7f6-42a0-9b56-53f89b7641bb) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket48784_test:ticket48784_test.py:90 Ticket 48784 - Allow usage of OpenLDAP libraries that don't use NSS for crypto INFO  tests.tickets.ticket48784_test:ticket48784_test.py:50 ######################### Configure SSL/TLS agreements ###################### INFO  tests.tickets.ticket48784_test:ticket48784_test.py:51 ######################## master1 <-- startTLS -> master2 ##################### INFO  tests.tickets.ticket48784_test:ticket48784_test.py:53 ##### Update the agreement of master1 INFO  tests.tickets.ticket48784_test:ticket48784_test.py:58 ##### Update the agreement of master2 INFO  tests.tickets.ticket48784_test:ticket48784_test.py:68 ######################### Configure SSL/TLS agreements Done ######################
Failed tickets/ticket48798_test.py::test_ticket48798 8.22
topology_st = <lib389.topologies.TopologyMain object at 0x7fd166a93dc0>

def test_ticket48798(topology_st):
"""
Test DH param sizes offered by DS.

"""
topology_st.standalone.enable_tls()

# Confirm that we have a connection, and that it has DH

# Open a socket to the port.
# Check the security settings.
> size = check_socket_dh_param_size(topology_st.standalone.host, topology_st.standalone.sslport)

/export/tests/tickets/ticket48798_test.py:46:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/export/tests/tickets/ticket48798_test.py:23: in check_socket_dh_param_size
output = check_output(cmd, shell=True)
/usr/lib64/python3.8/subprocess.py:411: in check_output
return run(*popenargs, stdout=PIPE, timeout=timeout, check=True,
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

input = None, capture_output = False, timeout = None, check = True
popenargs = ('echo quit | openssl s_client -connect ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63601 -msg -cipher DH | grep -A 1 ServerKeyExchange',)
kwargs = {'shell': True, 'stdout': -1}
process = <subprocess.Popen object at 0x7fd166aa2d60>, stdout = b''
stderr = None, retcode = 1

def run(*popenargs,
input=None, capture_output=False, timeout=None, check=False, **kwargs):
"""Run command with arguments and return a CompletedProcess instance.

The returned instance will have attributes args, returncode, stdout and
stderr. By default, stdout and stderr are not captured, and those attributes
will be None. Pass stdout=PIPE and/or stderr=PIPE in order to capture them.

If check is True and the exit code was non-zero, it raises a
CalledProcessError. The CalledProcessError object will have the return code
in the returncode attribute, and output & stderr attributes if those streams
were captured.

If timeout is given, and the process takes too long, a TimeoutExpired
exception will be raised.

There is an optional argument "input", allowing you to
pass bytes or a string to the subprocess's stdin. If you use this argument
you may not also use the Popen constructor's "stdin" argument, as
it will be used internally.

By default, all communication is in bytes, and therefore any "input" should
be bytes, and the stdout and stderr will be bytes. If in text mode, any
"input" should be a string, and stdout and stderr will be strings decoded
according to locale encoding, or by "encoding" if set. Text mode is
triggered by setting any of text, encoding, errors or universal_newlines.

The other arguments are the same as for the Popen constructor.
"""
if input is not None:
if kwargs.get('stdin') is not None:
raise ValueError('stdin and input arguments may not both be used.')
kwargs['stdin'] = PIPE

if capture_output:
if kwargs.get('stdout') is not None or kwargs.get('stderr') is not None:
raise ValueError('stdout and stderr arguments may not be used '
'with capture_output.')
kwargs['stdout'] = PIPE
kwargs['stderr'] = PIPE

with Popen(*popenargs, **kwargs) as process:
try:
stdout, stderr = process.communicate(input, timeout=timeout)
except TimeoutExpired as exc:
process.kill()
if _mswindows:
# Windows accumulates the output in a single blocking
# read() call run on child threads, with the timeout
# being done in a join() on those threads. communicate()
# _after_ kill() is required to collect that and add it
# to the exception.
exc.stdout, exc.stderr = process.communicate()
else:
# POSIX _communicate already populated the output so
# far into the TimeoutExpired exception.
process.wait()
raise
except: # Including KeyboardInterrupt, communicate handled that.
process.kill()
# We don't call process.wait() as .__exit__ does that for us.
raise
retcode = process.poll()
if check and retcode:
> raise CalledProcessError(retcode, process.args,
output=stdout, stderr=stderr)
E subprocess.CalledProcessError: Command 'echo quit | openssl s_client -connect ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63601 -msg -cipher DH | grep -A 1 ServerKeyExchange' returned non-zero exit status 1.

/usr/lib64/python3.8/subprocess.py:512: CalledProcessError
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
------------------------------Captured stderr call------------------------------
depth=1 C = AU, ST = Queensland, L = 389ds, O = testing, CN = ssca.389ds.example.com verify return:1 depth=0 C = AU, ST = Queensland, L = 389ds, O = testing, GN = e642c4c0-be46-4551-bbd0-de48c83ca2ec, CN = ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com verify return:1 DONE
Failed tickets/ticket48808_test.py::test_ticket48808 5.69
topology_st = <lib389.topologies.TopologyMain object at 0x7fd166bd9460>
create_user = None

def test_ticket48808(topology_st, create_user):
log.info('Run multiple paging controls on a single connection')
users_num = 100
page_size = 30
users_list = add_users(topology_st, users_num)
search_flt = r'(uid=test*)'
searchreq_attrlist = ['dn', 'sn']

log.info('Set user bind')
topology_st.standalone.simple_bind_s(TEST_USER_DN, TEST_USER_PWD)

log.info('Create simple paged results control instance')
req_ctrl = SimplePagedResultsControl(True, size=page_size, cookie='')
controls = [req_ctrl]

for ii in range(3):
log.info('Iteration %d' % ii)
msgid = topology_st.standalone.search_ext(DEFAULT_SUFFIX,
ldap.SCOPE_SUBTREE,
search_flt,
searchreq_attrlist,
serverctrls=controls)
rtype, rdata, rmsgid, rctrls = topology_st.standalone.result3(msgid)
pctrls = [
c
for c in rctrls
if c.controlType == SimplePagedResultsControl.controlType
]

req_ctrl.cookie = pctrls[0].cookie
msgid = topology_st.standalone.search_ext(DEFAULT_SUFFIX,
ldap.SCOPE_SUBTREE,
search_flt,
searchreq_attrlist,
serverctrls=controls)
log.info('Set Directory Manager bind back')
topology_st.standalone.simple_bind_s(DN_DM, PASSWORD)
del_users(topology_st, users_list)

log.info('Abandon the search')
users_num = 10
page_size = 0
users_list = add_users(topology_st, users_num)
search_flt = r'(uid=test*)'
searchreq_attrlist = ['dn', 'sn']

log.info('Set user bind')
topology_st.standalone.simple_bind_s(TEST_USER_DN, TEST_USER_PWD)

log.info('Create simple paged results control instance')
req_ctrl = SimplePagedResultsControl(True, size=page_size, cookie='')
controls = [req_ctrl]

msgid = topology_st.standalone.search_ext(DEFAULT_SUFFIX,
ldap.SCOPE_SUBTREE,
search_flt,
searchreq_attrlist,
serverctrls=controls)
rtype, rdata, rmsgid, rctrls = topology_st.standalone.result3(msgid)
pctrls = [
c
for c in rctrls
if c.controlType == SimplePagedResultsControl.controlType
]
assert not pctrls[0].cookie

log.info('Set Directory Manager bind back')
topology_st.standalone.simple_bind_s(DN_DM, PASSWORD)
del_users(topology_st, users_list)

log.info("Search should fail with 'nsPagedSizeLimit = 5'"
"and 'nsslapd-pagedsizelimit = 15' with 10 users")
conf_attr = b'15'
user_attr = b'5'
expected_rs = ldap.SIZELIMIT_EXCEEDED
users_num = 10
page_size = 10
users_list = add_users(topology_st, users_num)
search_flt = r'(uid=test*)'
searchreq_attrlist = ['dn', 'sn']
conf_attr_bck = change_conf_attr(topology_st, DN_CONFIG,
'nsslapd-pagedsizelimit', conf_attr)
user_attr_bck = change_conf_attr(topology_st, TEST_USER_DN,
'nsPagedSizeLimit', user_attr)

log.info('Set user bind')
topology_st.standalone.simple_bind_s(TEST_USER_DN, TEST_USER_PWD)

log.info('Create simple paged results control instance')
req_ctrl = SimplePagedResultsControl(True, size=page_size, cookie='')
controls = [req_ctrl]

log.info('Expect to fail with SIZELIMIT_EXCEEDED')
with pytest.raises(expected_rs):
> all_results = paged_search(topology_st, controls,
search_flt, searchreq_attrlist)
E Failed: DID NOT RAISE <class 'ldap.SIZELIMIT_EXCEEDED'>

/export/tests/tickets/ticket48808_test.py:252: Failed
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket48808_test:ticket48808_test.py:159 Run multiple paging controls on a single connection INFO  tests.tickets.ticket48808_test:ticket48808_test.py:48 Adding 100 users INFO  tests.tickets.ticket48808_test:ticket48808_test.py:166 Set user bind INFO  tests.tickets.ticket48808_test:ticket48808_test.py:169 Create simple paged results control instance INFO  tests.tickets.ticket48808_test:ticket48808_test.py:174 Iteration 0 INFO  tests.tickets.ticket48808_test:ticket48808_test.py:174 Iteration 1 INFO  tests.tickets.ticket48808_test:ticket48808_test.py:174 Iteration 2 INFO  tests.tickets.ticket48808_test:ticket48808_test.py:193 Set Directory Manager bind back INFO  tests.tickets.ticket48808_test:ticket48808_test.py:75 Deleting 100 users INFO  tests.tickets.ticket48808_test:ticket48808_test.py:197 Abandon the search INFO  tests.tickets.ticket48808_test:ticket48808_test.py:48 Adding 10 users INFO  tests.tickets.ticket48808_test:ticket48808_test.py:204 Set user bind INFO  tests.tickets.ticket48808_test:ticket48808_test.py:207 Create simple paged results control instance INFO  tests.tickets.ticket48808_test:ticket48808_test.py:224 Set Directory Manager bind back INFO  tests.tickets.ticket48808_test:ticket48808_test.py:75 Deleting 10 users INFO  tests.tickets.ticket48808_test:ticket48808_test.py:228 Search should fail with 'nsPagedSizeLimit = 5'and 'nsslapd-pagedsizelimit = 15' with 10 users INFO  tests.tickets.ticket48808_test:ticket48808_test.py:48 Adding 10 users INFO  tests.tickets.ticket48808_test:ticket48808_test.py:95 Set nsslapd-pagedsizelimit to b'15'. Previous value - [b'0']. Modified suffix - cn=config. INFO  tests.tickets.ticket48808_test:ticket48808_test.py:95 Set nsPagedSizeLimit to b'5'. Previous value - None. Modified suffix - uid=simplepaged_test,dc=example,dc=com. 
INFO  tests.tickets.ticket48808_test:ticket48808_test.py:243 Set user bind INFO  tests.tickets.ticket48808_test:ticket48808_test.py:246 Create simple paged results control instance INFO  tests.tickets.ticket48808_test:ticket48808_test.py:250 Expect to fail with SIZELIMIT_EXCEEDED INFO  tests.tickets.ticket48808_test:ticket48808_test.py:130 Getting page 0
Failed tickets/ticket48896_test.py::test_ticket48896 0.20
server = <lib389.DirSrv object at 0x7fd166851700>, curpw = 'password'
newpw = 'Abcd012+', expstr = 'be ok', rc = 0

def replace_pw(server, curpw, newpw, expstr, rc):
log.info('Binding as {%s, %s}' % (TESTDN, curpw))
server.simple_bind_s(TESTDN, curpw)

hit = 0
log.info('Replacing password: %s -> %s, which should %s' % (curpw, newpw, expstr))
try:
> server.modify_s(TESTDN, [(ldap.MOD_REPLACE, 'userPassword', ensure_bytes(newpw))])

/export/tests/tickets/ticket48896_test.py:53:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

args = ('uid=buser123,dc=example,dc=com', [(2, 'userPassword', b'Abcd012+')])
kwargs = {}
c_stack = [FrameInfo(frame=<frame at 0x559a4f88a8a0, file '/usr/local/lib/python3.8/site-packages/lib389/__init__.py', line 180,...mbda>', code_context=[' self._inner_hookexec = lambda hook, methods, kwargs: hook.multicall(\n'], index=0), ...]
frame = FrameInfo(frame=<frame at 0x559a4fa3acd0, file '/export/tests/tickets/ticket48896_test.py', line 57, code replace_pw>,...code_context=[" server.modify_s(TESTDN, [(ldap.MOD_REPLACE, 'userPassword', ensure_bytes(newpw))])\n"], index=0)

def inner(*args, **kwargs):
if name in [
'add_s',
'bind_s',
'delete_s',
'modify_s',
'modrdn_s',
'rename_s',
'sasl_interactive_bind_s',
'search_s',
'search_ext_s',
'simple_bind_s',
'unbind_s',
'getEntry',
] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'):
c_stack = inspect.stack()
frame = c_stack[1]

warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. "
"Found in: %s:%s" % (name, frame.filename, frame.lineno)))
# Later, we will add a sleep here to make it even more painful.
# Finally, it will raise an exception.
elif 'escapehatch' in kwargs:
kwargs.pop('escapehatch')

if name == 'result':
objtype, data = f(*args, **kwargs)
# data is either a 2-tuple or a list of 2-tuples
# print data
if data:
if isinstance(data, tuple):
return objtype, Entry(data)
elif isinstance(data, list):
# AD sends back these search references
# if objtype == ldap.RES_SEARCH_RESULT and \
# isinstance(data[-1],tuple) and \
# not data[-1][0]:
# print "Received search reference: "
# pprint.pprint(data[-1][1])
# data.pop() # remove the last non-entry element

return objtype, [Entry(x) for x in data]
else:
raise TypeError("unknown data type %s returned by result" %
type(data))
else:
return objtype, data
elif name.startswith('add'):
# the first arg is self
# the second and third arg are the dn and the data to send
# We need to convert the Entry into the format used by
# python-ldap
ent = args[0]
if isinstance(ent, Entry):
return f(ent.dn, ent.toTupleList(), *args[2:])
else:
return f(*args, **kwargs)
else:
> return f(*args, **kwargs)

/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd166851700>
dn = 'uid=buser123,dc=example,dc=com'
modlist = [(2, 'userPassword', b'Abcd012+')]

def modify_s(self,dn,modlist):
> return self.modify_ext_s(dn,modlist,None,None)

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:640:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

args = ('uid=buser123,dc=example,dc=com', [(2, 'userPassword', b'Abcd012+')], None, None)
kwargs = {}

def inner(*args, **kwargs):
if name in [
'add_s',
'bind_s',
'delete_s',
'modify_s',
'modrdn_s',
'rename_s',
'sasl_interactive_bind_s',
'search_s',
'search_ext_s',
'simple_bind_s',
'unbind_s',
'getEntry',
] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'):
c_stack = inspect.stack()
frame = c_stack[1]

warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. "
"Found in: %s:%s" % (name, frame.filename, frame.lineno)))
# Later, we will add a sleep here to make it even more painful.
# Finally, it will raise an exception.
elif 'escapehatch' in kwargs:
kwargs.pop('escapehatch')

if name == 'result':
objtype, data = f(*args, **kwargs)
# data is either a 2-tuple or a list of 2-tuples
# print data
if data:
if isinstance(data, tuple):
return objtype, Entry(data)
elif isinstance(data, list):
# AD sends back these search references
# if objtype == ldap.RES_SEARCH_RESULT and \
# isinstance(data[-1],tuple) and \
# not data[-1][0]:
# print "Received search reference: "
# pprint.pprint(data[-1][1])
# data.pop() # remove the last non-entry element

return objtype, [Entry(x) for x in data]
else:
raise TypeError("unknown data type %s returned by result" %
type(data))
else:
return objtype, data
elif name.startswith('add'):
# the first arg is self
# the second and third arg are the dn and the data to send
# We need to convert the Entry into the format used by
# python-ldap
ent = args[0]
if isinstance(ent, Entry):
return f(ent.dn, ent.toTupleList(), *args[2:])
else:
return f(*args, **kwargs)
else:
> return f(*args, **kwargs)

/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd166851700>
dn = 'uid=buser123,dc=example,dc=com'
modlist = [(2, 'userPassword', b'Abcd012+')], serverctrls = None
clientctrls = None

def modify_ext_s(self,dn,modlist,serverctrls=None,clientctrls=None):
msgid = self.modify_ext(dn,modlist,serverctrls,clientctrls)
> resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout)

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:613:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

args = (8,), kwargs = {'all': 1, 'timeout': -1}

def inner(*args, **kwargs):
if name in [
'add_s',
'bind_s',
'delete_s',
'modify_s',
'modrdn_s',
'rename_s',
'sasl_interactive_bind_s',
'search_s',
'search_ext_s',
'simple_bind_s',
'unbind_s',
'getEntry',
] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'):
c_stack = inspect.stack()
frame = c_stack[1]

warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. "
"Found in: %s:%s" % (name, frame.filename, frame.lineno)))
# Later, we will add a sleep here to make it even more painful.
# Finally, it will raise an exception.
elif 'escapehatch' in kwargs:
kwargs.pop('escapehatch')

if name == 'result':
objtype, data = f(*args, **kwargs)
# data is either a 2-tuple or a list of 2-tuples
# print data
if data:
if isinstance(data, tuple):
return objtype, Entry(data)
elif isinstance(data, list):
# AD sends back these search references
# if objtype == ldap.RES_SEARCH_RESULT and \
# isinstance(data[-1],tuple) and \
# not data[-1][0]:
# print "Received search reference: "
# pprint.pprint(data[-1][1])
# data.pop() # remove the last non-entry element

return objtype, [Entry(x) for x in data]
else:
raise TypeError("unknown data type %s returned by result" %
type(data))
else:
return objtype, data
elif name.startswith('add'):
# the first arg is self
# the second and third arg are the dn and the data to send
# We need to convert the Entry into the format used by
# python-ldap
ent = args[0]
if isinstance(ent, Entry):
return f(ent.dn, ent.toTupleList(), *args[2:])
else:
return f(*args, **kwargs)
else:
> return f(*args, **kwargs)

/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd166851700>, msgid = 8, all = 1
timeout = -1, resp_ctrl_classes = None

def result3(self,msgid=ldap.RES_ANY,all=1,timeout=None,resp_ctrl_classes=None):
> resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4(
msgid,all,timeout,
add_ctrls=0,add_intermediates=0,add_extop=0,
resp_ctrl_classes=resp_ctrl_classes
)

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:764:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

args = (8, 1, -1)
kwargs = {'add_ctrls': 0, 'add_extop': 0, 'add_intermediates': 0, 'resp_ctrl_classes': None}

def inner(*args, **kwargs):
if name in [
'add_s',
'bind_s',
'delete_s',
'modify_s',
'modrdn_s',
'rename_s',
'sasl_interactive_bind_s',
'search_s',
'search_ext_s',
'simple_bind_s',
'unbind_s',
'getEntry',
] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'):
c_stack = inspect.stack()
frame = c_stack[1]

warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. "
"Found in: %s:%s" % (name, frame.filename, frame.lineno)))
# Later, we will add a sleep here to make it even more painful.
# Finally, it will raise an exception.
elif 'escapehatch' in kwargs:
kwargs.pop('escapehatch')

if name == 'result':
objtype, data = f(*args, **kwargs)
# data is either a 2-tuple or a list of 2-tuples
# print data
if data:
if isinstance(data, tuple):
return objtype, Entry(data)
elif isinstance(data, list):
# AD sends back these search references
# if objtype == ldap.RES_SEARCH_RESULT and \
# isinstance(data[-1],tuple) and \
# not data[-1][0]:
# print "Received search reference: "
# pprint.pprint(data[-1][1])
# data.pop() # remove the last non-entry element

return objtype, [Entry(x) for x in data]
else:
raise TypeError("unknown data type %s returned by result" %
type(data))
else:
return objtype, data
elif name.startswith('add'):
# the first arg is self
# the second and third arg are the dn and the data to send
# We need to convert the Entry into the format used by
# python-ldap
ent = args[0]
if isinstance(ent, Entry):
return f(ent.dn, ent.toTupleList(), *args[2:])
else:
return f(*args, **kwargs)
else:
> return f(*args, **kwargs)

/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd166851700>, msgid = 8, all = 1
timeout = -1, add_ctrls = 0, add_intermediates = 0, add_extop = 0
resp_ctrl_classes = None

def result4(self,msgid=ldap.RES_ANY,all=1,timeout=None,add_ctrls=0,add_intermediates=0,add_extop=0,resp_ctrl_classes=None):
if timeout is None:
timeout = self.timeout
> ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop)

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:774:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

args = (<built-in method result4 of LDAP object at 0x7fd166870d80>, 8, 1, -1, 0, 0, ...)
kwargs = {}

def inner(*args, **kwargs):
if name in [
'add_s',
'bind_s',
'delete_s',
'modify_s',
'modrdn_s',
'rename_s',
'sasl_interactive_bind_s',
'search_s',
'search_ext_s',
'simple_bind_s',
'unbind_s',
'getEntry',
] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'):
c_stack = inspect.stack()
frame = c_stack[1]

warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. "
"Found in: %s:%s" % (name, frame.filename, frame.lineno)))
# Later, we will add a sleep here to make it even more painful.
# Finally, it will raise an exception.
elif 'escapehatch' in kwargs:
kwargs.pop('escapehatch')

if name == 'result':
objtype, data = f(*args, **kwargs)
# data is either a 2-tuple or a list of 2-tuples
# print data
if data:
if isinstance(data, tuple):
return objtype, Entry(data)
elif isinstance(data, list):
# AD sends back these search references
# if objtype == ldap.RES_SEARCH_RESULT and \
# isinstance(data[-1],tuple) and \
# not data[-1][0]:
# print "Received search reference: "
# pprint.pprint(data[-1][1])
# data.pop() # remove the last non-entry element

return objtype, [Entry(x) for x in data]
else:
raise TypeError("unknown data type %s returned by result" %
type(data))
else:
return objtype, data
elif name.startswith('add'):
# the first arg is self
# the second and third arg are the dn and the data to send
# We need to convert the Entry into the format used by
# python-ldap
ent = args[0]
if isinstance(ent, Entry):
return f(ent.dn, ent.toTupleList(), *args[2:])
else:
return f(*args, **kwargs)
else:
> return f(*args, **kwargs)

/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd166851700>
func = <built-in method result4 of LDAP object at 0x7fd166870d80>
args = (8, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None
exc_type = None, exc_value = None, exc_traceback = None

def _ldap_call(self,func,*args,**kwargs):
"""
Wrapper method mainly for serializing calls into OpenLDAP libs
and trace logs
"""
self._ldap_object_lock.acquire()
if __debug__:
if self._trace_level>=1:
self._trace_file.write('*** %s %s - %s\n%s\n' % (
repr(self),
self._uri,
'.'.join((self.__class__.__name__,func.__name__)),
pprint.pformat((args,kwargs))
))
if self._trace_level>=9:
traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file)
diagnostic_message_success = None
try:
try:
result = func(*args,**kwargs)
if __debug__ and self._trace_level>=2:
if func.__name__!="unbind_ext":
diagnostic_message_success = self._l.get_option(ldap.OPT_DIAGNOSTIC_MESSAGE)
finally:
self._ldap_object_lock.release()
except LDAPError as e:
exc_type,exc_value,exc_traceback = sys.exc_info()
try:
if 'info' not in e.args[0] and 'errno' in e.args[0]:
e.args[0]['info'] = strerror(e.args[0]['errno'])
except IndexError:
pass
if __debug__ and self._trace_level>=2:
self._trace_file.write('=> LDAPError - %s: %s\n' % (e.__class__.__name__,str(e)))
try:
> reraise(exc_type, exc_value, exc_traceback)

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:340:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

exc_type = <class 'ldap.INSUFFICIENT_ACCESS'>
exc_value = INSUFFICIENT_ACCESS({'msgtype': 103, 'msgid': 8, 'result': 50, 'desc': 'Insufficient access', 'ctrls': [], 'info': "Insufficient 'write' privilege to the 'userPassword' attribute of entry 'uid=buser123,dc=example,dc=com'.\n"})
exc_traceback = <traceback object at 0x7fd166c0cf80>

def reraise(exc_type, exc_value, exc_traceback):
"""Re-raise an exception given information from sys.exc_info()

Note that unlike six.reraise, this does not support replacing the
traceback. All arguments must come from a single sys.exc_info() call.
"""
# In Python 3, all exception info is contained in one object.
> raise exc_value

/usr/local/lib64/python3.8/site-packages/ldap/compat.py:46:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd166851700>
func = <built-in method result4 of LDAP object at 0x7fd166870d80>
args = (8, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None
exc_type = None, exc_value = None, exc_traceback = None

def _ldap_call(self,func,*args,**kwargs):
"""
Wrapper method mainly for serializing calls into OpenLDAP libs
and trace logs
"""
self._ldap_object_lock.acquire()
if __debug__:
if self._trace_level>=1:
self._trace_file.write('*** %s %s - %s\n%s\n' % (
repr(self),
self._uri,
'.'.join((self.__class__.__name__,func.__name__)),
pprint.pformat((args,kwargs))
))
if self._trace_level>=9:
traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file)
diagnostic_message_success = None
try:
try:
> result = func(*args,**kwargs)
E ldap.INSUFFICIENT_ACCESS: {'msgtype': 103, 'msgid': 8, 'result': 50, 'desc': 'Insufficient access', 'ctrls': [], 'info': "Insufficient 'write' privilege to the 'userPassword' attribute of entry 'uid=buser123,dc=example,dc=com'.\n"}

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:324: INSUFFICIENT_ACCESS

During handling of the above exception, another exception occurred:

topology_st = <lib389.topologies.TopologyMain object at 0x7fd1668514c0>

def test_ticket48896(topology_st):
"""
"""
log.info('Testing Ticket 48896 - Default Setting for passwordMinTokenLength does not work')

log.info("Setting global password policy with password syntax.")
topology_st.standalone.simple_bind_s(DN_DM, PASSWORD)
topology_st.standalone.modify_s(CONFIG_DN, [(ldap.MOD_REPLACE, 'passwordCheckSyntax', b'on'),
(ldap.MOD_REPLACE, 'nsslapd-pwpolicy-local', b'on')])

config = topology_st.standalone.search_s(CONFIG_DN, ldap.SCOPE_BASE, 'cn=*')
mintokenlen = config[0].getValue('passwordMinTokenLength')
history = config[0].getValue('passwordInHistory')

log.info('Default passwordMinTokenLength == %s' % mintokenlen)
log.info('Default passwordInHistory == %s' % history)

log.info('Adding a user.')
curpw = 'password'
topology_st.standalone.add_s(Entry((TESTDN,
{'objectclass': "top person organizationalPerson inetOrgPerson".split(),
'cn': 'test user',
'sn': 'user',
'userPassword': curpw})))

newpw = 'Abcd012+'
exp = 'be ok'
rc = 0
> replace_pw(topology_st.standalone, curpw, newpw, exp, rc)

/export/tests/tickets/ticket48896_test.py:94:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

server = <lib389.DirSrv object at 0x7fd166851700>, curpw = 'password'
newpw = 'Abcd012+', expstr = 'be ok', rc = 0

def replace_pw(server, curpw, newpw, expstr, rc):
log.info('Binding as {%s, %s}' % (TESTDN, curpw))
server.simple_bind_s(TESTDN, curpw)

hit = 0
log.info('Replacing password: %s -> %s, which should %s' % (curpw, newpw, expstr))
try:
server.modify_s(TESTDN, [(ldap.MOD_REPLACE, 'userPassword', ensure_bytes(newpw))])
except Exception as e:
log.info("Exception (expected): %s" % type(e).__name__)
hit = 1
> assert isinstance(e, rc)
E TypeError: isinstance() arg 2 must be a type or tuple of types

/export/tests/tickets/ticket48896_test.py:57: TypeError
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket48896_test:ticket48896_test.py:69 Testing Ticket 48896 - Default Setting for passwordMinTokenLength does not work INFO  tests.tickets.ticket48896_test:ticket48896_test.py:71 Setting global password policy with password syntax. INFO  tests.tickets.ticket48896_test:ticket48896_test.py:80 Default passwordMinTokenLength == b'3' INFO  tests.tickets.ticket48896_test:ticket48896_test.py:81 Default passwordInHistory == b'6' INFO  tests.tickets.ticket48896_test:ticket48896_test.py:83 Adding a user. INFO  tests.tickets.ticket48896_test:ticket48896_test.py:47 Binding as {uid=buser123,dc=example,dc=com, password} INFO  tests.tickets.ticket48896_test:ticket48896_test.py:51 Replacing password: password -> Abcd012+, which should be ok INFO  tests.tickets.ticket48896_test:ticket48896_test.py:55 Exception (expected): INSUFFICIENT_ACCESS
Failed tickets/ticket48916_test.py::test_ticket48916 48.81
topology_m2 = <lib389.topologies.TopologyMain object at 0x7fd166ad75b0>

def test_ticket48916(topology_m2):
"""
https://bugzilla.redhat.com/show_bug.cgi?id=1353629

This is an issue with ID exhaustion in DNA causing a crash.

To access each DirSrv instance use: topology_m2.ms["master1"], topology_m2.ms["master2"],
..., topology_m2.hub1, ..., topology_m2.consumer1,...


"""

if DEBUGGING:
# Add debugging steps(if any)...
pass

# Enable the plugin on both servers

dna_m1 = topology_m2.ms["master1"].plugins.get('Distributed Numeric Assignment Plugin')
dna_m2 = topology_m2.ms["master2"].plugins.get('Distributed Numeric Assignment Plugin')

# Configure it
# Create the container for the ranges to go into.

topology_m2.ms["master1"].add_s(Entry(
('ou=Ranges,%s' % DEFAULT_SUFFIX, {
'objectClass': 'top organizationalUnit'.split(' '),
'ou': 'Ranges',
})
))

# Create the dnaAdmin?

# For now we just pinch the dn from the dna_m* types, and add the relevant child config
# but in the future, this could be a better plugin template type from lib389

config_dn = dna_m1.dn

topology_m2.ms["master1"].add_s(Entry(
('cn=uids,%s' % config_dn, {
'objectClass': 'top dnaPluginConfig'.split(' '),
'cn': 'uids',
'dnatype': 'uidNumber gidNumber'.split(' '),
'dnafilter': '(objectclass=posixAccount)',
'dnascope': '%s' % DEFAULT_SUFFIX,
'dnaNextValue': '1',
'dnaMaxValue': '50',
'dnasharedcfgdn': 'ou=Ranges,%s' % DEFAULT_SUFFIX,
'dnaThreshold': '0',
'dnaRangeRequestTimeout': '60',
'dnaMagicRegen': '-1',
'dnaRemoteBindDN': 'uid=dnaAdmin,ou=People,%s' % DEFAULT_SUFFIX,
'dnaRemoteBindCred': 'secret123',
'dnaNextRange': '80-90'
})
))

topology_m2.ms["master2"].add_s(Entry(
('cn=uids,%s' % config_dn, {
'objectClass': 'top dnaPluginConfig'.split(' '),
'cn': 'uids',
'dnatype': 'uidNumber gidNumber'.split(' '),
'dnafilter': '(objectclass=posixAccount)',
'dnascope': '%s' % DEFAULT_SUFFIX,
'dnaNextValue': '61',
'dnaMaxValue': '70',
'dnasharedcfgdn': 'ou=Ranges,%s' % DEFAULT_SUFFIX,
'dnaThreshold': '2',
'dnaRangeRequestTimeout': '60',
'dnaMagicRegen': '-1',
'dnaRemoteBindDN': 'uid=dnaAdmin,ou=People,%s' % DEFAULT_SUFFIX,
'dnaRemoteBindCred': 'secret123',
})
))

# Enable the plugins
dna_m1.enable()
dna_m2.enable()

# Restart the instances
topology_m2.ms["master1"].restart(60)
topology_m2.ms["master2"].restart(60)

# Wait for a replication .....
time.sleep(40)

# Allocate the 10 members to exhaust

for i in range(1, 11):
_create_user(topology_m2.ms["master2"], i)

# Allocate the 11th
> _create_user(topology_m2.ms["master2"], 11)

/export/tests/tickets/ticket48916_test.py:126:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/export/tests/tickets/ticket48916_test.py:21: in _create_user
inst.add_s(Entry(
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:176: in inner
return f(ent.dn, ent.toTupleList(), *args[2:])
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:439: in add_s
return self.add_ext_s(dn,modlist,None,None)
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:178: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:425: in add_ext_s
resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout)
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:764: in result3
resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4(
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:774: in result4
ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop)
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:340: in _ldap_call
reraise(exc_type, exc_value, exc_traceback)
/usr/local/lib64/python3.8/site-packages/ldap/compat.py:46: in reraise
raise exc_value
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd166854f70>
func = <built-in method result4 of LDAP object at 0x7fd17aa7b750>
args = (13, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None
exc_type = None, exc_value = None, exc_traceback = None

def _ldap_call(self,func,*args,**kwargs):
"""
Wrapper method mainly for serializing calls into OpenLDAP libs
and trace logs
"""
self._ldap_object_lock.acquire()
if __debug__:
if self._trace_level>=1:
self._trace_file.write('*** %s %s - %s\n%s\n' % (
repr(self),
self._uri,
'.'.join((self.__class__.__name__,func.__name__)),
pprint.pformat((args,kwargs))
))
if self._trace_level>=9:
traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file)
diagnostic_message_success = None
try:
try:
> result = func(*args,**kwargs)
E ldap.OPERATIONS_ERROR: {'msgtype': 105, 'msgid': 13, 'result': 1, 'desc': 'Operations error', 'ctrls': [], 'info': 'Allocation of a new value for range cn=uids,cn=distributed numeric assignment plugin,cn=plugins,cn=config failed! Unable to proceed.'}

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:324: OPERATIONS_ERROR
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 06829afa-f3b6-43a0-8e8a-47e63253e966 / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working 
(expect 8df1a77a-1d8a-4932-9ace-d965b4a8ef9d / got description=06829afa-f3b6-43a0-8e8a-47e63253e966) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists
Failed tickets/ticket48956_test.py::test_ticket48956 6.38
topology_st = <lib389.topologies.TopologyMain object at 0x7fd165e4dd00>

def test_ticket48956(topology_st):
"""Write your testcase here...

Also, if you need any testcase initialization,
please, write additional fixture for that(include finalizer).

"""

topology_st.standalone.modify_s(ACCT_POLICY_PLUGIN_DN,
[(ldap.MOD_REPLACE, 'nsslapd-pluginarg0', ensure_bytes(ACCT_POLICY_CONFIG_DN))])

topology_st.standalone.modify_s(ACCT_POLICY_CONFIG_DN, [(ldap.MOD_REPLACE, 'alwaysrecordlogin', b'yes'),
(ldap.MOD_REPLACE, 'stateattrname', b'lastLoginTime'),
(ldap.MOD_REPLACE, 'altstateattrname', b'createTimestamp'),
(ldap.MOD_REPLACE, 'specattrname', b'acctPolicySubentry'),
(ldap.MOD_REPLACE, 'limitattrname',
b'accountInactivityLimit')])

# Enable the plugins
topology_st.standalone.plugins.enable(name=PLUGIN_ACCT_POLICY)
topology_st.standalone.restart(timeout=10)

# Check inactivity on standard suffix (short)
> _check_inactivity(topology_st, SUFFIX)

/export/tests/tickets/ticket48956_test.py:107:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/export/tests/tickets/ticket48956_test.py:78: in _check_inactivity
assert (_check_status(topology_st, TEST_USER_DN, b'- activated'))
/export/tests/tickets/ticket48956_test.py:39: in _check_status
output = subprocess.check_output([nsaccountstatus, '-Z', topology_st.standalone.serverid,
/usr/lib64/python3.8/subprocess.py:411: in check_output
return run(*popenargs, stdout=PIPE, timeout=timeout, check=True,
/usr/lib64/python3.8/subprocess.py:489: in run
with Popen(*popenargs, **kwargs) as process:
/usr/lib64/python3.8/subprocess.py:854: in __init__
self._execute_child(args, executable, preexec_fn, close_fds,
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <subprocess.Popen object at 0x7fd165e41880>
args = ['/usr/sbin/ns-accountstatus.pl', '-Z', 'standalone1', '-D', 'cn=Directory Manager', '-w', ...]
executable = b'/usr/sbin/ns-accountstatus.pl', preexec_fn = None
close_fds = True, pass_fds = (), cwd = None, env = None, startupinfo = None
creationflags = 0, shell = False, p2cread = -1, p2cwrite = -1, c2pread = 48
c2pwrite = 49, errread = -1, errwrite = -1, restore_signals = True
start_new_session = False

def _execute_child(self, args, executable, preexec_fn, close_fds,
pass_fds, cwd, env,
startupinfo, creationflags, shell,
p2cread, p2cwrite,
c2pread, c2pwrite,
errread, errwrite,
restore_signals, start_new_session):
"""Execute program (POSIX version)"""

if isinstance(args, (str, bytes)):
args = [args]
elif isinstance(args, os.PathLike):
if shell:
raise TypeError('path-like args is not allowed when '
'shell is true')
args = [args]
else:
args = list(args)

if shell:
# On Android the default shell is at '/system/bin/sh'.
unix_shell = ('/system/bin/sh' if
hasattr(sys, 'getandroidapilevel') else '/bin/sh')
args = [unix_shell, "-c"] + args
if executable:
args[0] = executable

if executable is None:
executable = args[0]

sys.audit("subprocess.Popen", executable, args, cwd, env)

if (_USE_POSIX_SPAWN
and os.path.dirname(executable)
and preexec_fn is None
and not close_fds
and not pass_fds
and cwd is None
and (p2cread == -1 or p2cread > 2)
and (c2pwrite == -1 or c2pwrite > 2)
and (errwrite == -1 or errwrite > 2)
and not start_new_session):
self._posix_spawn(args, executable, env, restore_signals,
p2cread, p2cwrite,
c2pread, c2pwrite,
errread, errwrite)
return

orig_executable = executable

# For transferring possible exec failure from child to parent.
# Data format: "exception name:hex errno:description"
# Pickle is not used; it is complex and involves memory allocation.
errpipe_read, errpipe_write = os.pipe()
# errpipe_write must not be in the standard io 0, 1, or 2 fd range.
low_fds_to_close = []
while errpipe_write < 3:
low_fds_to_close.append(errpipe_write)
errpipe_write = os.dup(errpipe_write)
for low_fd in low_fds_to_close:
os.close(low_fd)
try:
try:
# We must avoid complex work that could involve
# malloc or free in the child process to avoid
# potential deadlocks, thus we do all this here.
# and pass it to fork_exec()

if env is not None:
env_list = []
for k, v in env.items():
k = os.fsencode(k)
if b'=' in k:
raise ValueError("illegal environment variable name")
env_list.append(k + b'=' + os.fsencode(v))
else:
env_list = None # Use execv instead of execve.
executable = os.fsencode(executable)
if os.path.dirname(executable):
executable_list = (executable,)
else:
# This matches the behavior of os._execvpe().
executable_list = tuple(
os.path.join(os.fsencode(dir), executable)
for dir in os.get_exec_path(env))
fds_to_keep = set(pass_fds)
fds_to_keep.add(errpipe_write)
self.pid = _posixsubprocess.fork_exec(
args, executable_list,
close_fds, tuple(sorted(map(int, fds_to_keep))),
cwd, env_list,
p2cread, p2cwrite, c2pread, c2pwrite,
errread, errwrite,
errpipe_read, errpipe_write,
restore_signals, start_new_session, preexec_fn)
self._child_created = True
finally:
# be sure the FD is closed no matter what
os.close(errpipe_write)

self._close_pipe_fds(p2cread, p2cwrite,
c2pread, c2pwrite,
errread, errwrite)

# Wait for exec to fail or succeed; possibly raising an
# exception (limited in size)
errpipe_data = bytearray()
while True:
part = os.read(errpipe_read, 50000)
errpipe_data += part
if not part or len(errpipe_data) > 50000:
break
finally:
# be sure the FD is closed no matter what
os.close(errpipe_read)

if errpipe_data:
try:
pid, sts = os.waitpid(self.pid, 0)
if pid == self.pid:
self._handle_exitstatus(sts)
else:
self.returncode = sys.maxsize
except ChildProcessError:
pass

try:
exception_name, hex_errno, err_msg = (
errpipe_data.split(b':', 2))
# The encoding here should match the encoding
# written in by the subprocess implementations
# like _posixsubprocess
err_msg = err_msg.decode()
except ValueError:
exception_name = b'SubprocessError'
hex_errno = b'0'
err_msg = 'Bad exception data from child: {!r}'.format(
bytes(errpipe_data))
child_exception_type = getattr(
builtins, exception_name.decode('ascii'),
SubprocessError)
if issubclass(child_exception_type, OSError) and hex_errno:
errno_num = int(hex_errno, 16)
child_exec_never_called = (err_msg == "noexec")
if child_exec_never_called:
err_msg = ""
# The error must be from chdir(cwd).
err_filename = cwd
else:
err_filename = orig_executable
if errno_num != 0:
err_msg = os.strerror(errno_num)
> raise child_exception_type(errno_num, err_msg, err_filename)
E FileNotFoundError: [Errno 2] No such file or directory: '/usr/sbin/ns-accountstatus.pl'

/usr/lib64/python3.8/subprocess.py:1702: FileNotFoundError
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket48956_test:ticket48956_test.py:54 ######################### Adding Account Policy entry: cn=Account Inactivation Policy,dc=example,dc=com ###################### INFO  tests.tickets.ticket48956_test:ticket48956_test.py:61 ######################### Adding Test User entry: uid=ticket48956user,dc=example,dc=com ######################
Failed tickets/ticket48961_test.py::test_ticket48961_storagescheme 0.01
topology_st = <lib389.topologies.TopologyMain object at 0x7fd1669fee50>

def test_ticket48961_storagescheme(topology_st):
"""
Test deleting of the storage scheme.
"""

default = topology_st.standalone.config.get_attr_val('passwordStorageScheme')
# Change it
topology_st.standalone.config.set('passwordStorageScheme', 'CLEAR')
# Now delete it
> topology_st.standalone.config.remove('passwordStorageScheme', None)

/export/tests/tickets/ticket48961_test.py:28:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/usr/local/lib/python3.8/site-packages/lib389/_mapped_object.py:316: in remove
self.set(key, value, action=ldap.MOD_DELETE)
/usr/local/lib/python3.8/site-packages/lib389/_mapped_object.py:446: in set
return self._instance.modify_ext_s(self._dn, [(action, key, value)],
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:613: in modify_ext_s
resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout)
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:764: in result3
resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4(
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:774: in result4
ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop)
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:340: in _ldap_call
reraise(exc_type, exc_value, exc_traceback)
/usr/local/lib64/python3.8/site-packages/ldap/compat.py:46: in reraise
raise exc_value
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd1669fedf0>
func = <built-in method result4 of LDAP object at 0x7fd1669c8ae0>
args = (5, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None
exc_type = None, exc_value = None, exc_traceback = None

def _ldap_call(self,func,*args,**kwargs):
"""
Wrapper method mainly for serializing calls into OpenLDAP libs
and trace logs
"""
self._ldap_object_lock.acquire()
if __debug__:
if self._trace_level>=1:
self._trace_file.write('*** %s %s - %s\n%s\n' % (
repr(self),
self._uri,
'.'.join((self.__class__.__name__,func.__name__)),
pprint.pformat((args,kwargs))
))
if self._trace_level>=9:
traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file)
diagnostic_message_success = None
try:
try:
> result = func(*args,**kwargs)
E ldap.OPERATIONS_ERROR: {'msgtype': 103, 'msgid': 5, 'result': 1, 'desc': 'Operations error', 'ctrls': [], 'info': 'passwordStorageScheme: deleting the value is not allowed.'}

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:324: OPERATIONS_ERROR
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Failed tickets/ticket48961_test.py::test_ticket48961_deleteall 0.00
topology_st = <lib389.topologies.TopologyMain object at 0x7fd1669fee50>

def test_ticket48961_deleteall(topology_st):
"""
Test that we can delete all valid attrs, and that a few are rejected.
"""
attr_to_test = {
'nsslapd-listenhost': 'localhost',
'nsslapd-securelistenhost': 'localhost',
'nsslapd-allowed-sasl-mechanisms': 'GSSAPI',
'nsslapd-svrtab': 'Some bogus data', # This one could reset?
}
attr_to_fail = {
# These are the values that should always be dn dse.ldif too
'nsslapd-localuser': 'dirsrv',
'nsslapd-defaultnamingcontext': 'dc=example,dc=com', # Can't delete
'nsslapd-accesslog': '/opt/dirsrv/var/log/dirsrv/slapd-standalone/access',
'nsslapd-auditlog': '/opt/dirsrv/var/log/dirsrv/slapd-standalone/audit',
'nsslapd-errorlog': '/opt/dirsrv/var/log/dirsrv/slapd-standalone/errors',
'nsslapd-tmpdir': '/tmp',
'nsslapd-rundir': '/opt/dirsrv/var/run/dirsrv',
'nsslapd-bakdir': '/opt/dirsrv/var/lib/dirsrv/slapd-standalone/bak',
'nsslapd-certdir': '/opt/dirsrv/etc/dirsrv/slapd-standalone',
'nsslapd-instancedir': '/opt/dirsrv/lib/dirsrv/slapd-standalone',
'nsslapd-ldifdir': '/opt/dirsrv/var/lib/dirsrv/slapd-standalone/ldif',
'nsslapd-lockdir': '/opt/dirsrv/var/lock/dirsrv/slapd-standalone',
'nsslapd-schemadir': '/opt/dirsrv/etc/dirsrv/slapd-standalone/schema',
'nsslapd-workingdir': '/opt/dirsrv/var/log/dirsrv/slapd-standalone',
'nsslapd-localhost': 'localhost.localdomain',
# These can't be reset, but might be in dse.ldif. Probably in libglobs.
'nsslapd-certmap-basedn': 'cn=certmap,cn=config',
'nsslapd-port': '38931', # Can't delete
'nsslapd-secureport': '636', # Can't delete
'nsslapd-conntablesize': '1048576',
'nsslapd-rootpw': '{SSHA512}...',
# These are hardcoded server magic.
'nsslapd-hash-filters': 'off', # Can't delete
'nsslapd-requiresrestart': 'cn=config:nsslapd-port', # Can't change
'nsslapd-plugin': 'cn=case ignore string syntax,cn=plugins,cn=config', # Can't change
'nsslapd-privatenamespaces': 'cn=schema', # Can't change
'nsslapd-allowed-to-delete-attrs': 'None', # Can't delete
'nsslapd-accesslog-list': 'List!', # Can't delete
'nsslapd-auditfaillog-list': 'List!',
'nsslapd-auditlog-list': 'List!',
'nsslapd-errorlog-list': 'List!',
'nsslapd-config': 'cn=config',
'nsslapd-versionstring': '389-Directory/1.3.6.0',
'objectclass': '',
'cn': '',
# These are the odd values
'nsslapd-backendconfig': 'cn=config,cn=userRoot,cn=ldbm database,cn=plugins,cn=config', # Doesn't exist?
'nsslapd-betype': 'ldbm database', # Doesn't exist?
'nsslapd-connection-buffer': 1, # Has an ldap problem
'nsslapd-malloc-mmap-threshold': '-10', # Defunct anyway
'nsslapd-malloc-mxfast': '-10',
'nsslapd-malloc-trim-threshold': '-10',
'nsslapd-referralmode': '',
'nsslapd-saslpath': '',
'passwordadmindn': '',
}

> config_entry = topology_st.standalone.config.raw_entry()

/export/tests/tickets/ticket48961_test.py:101:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.config.Config object at 0x7fd1669c8d90>, name = 'raw_entry'

def __getattr__(self, name):
"""This enables a bit of magic to allow us to wrap any function ending with
_json to it's form without json, then transformed. It means your function
*must* return it's values as a dict of:

{ attr : [val, val, ...], attr : [], ... }
to be supported.
"""

if (name.endswith('_json')):
int_name = name.replace('_json', '')
pfunc = partial(self._jsonify, getattr(self, int_name))
return pfunc
else:
> raise AttributeError("'%s' object has no attribute '%s'" % (self.__class__.__name__, name))
E AttributeError: 'Config' object has no attribute 'raw_entry'

/usr/local/lib/python3.8/site-packages/lib389/_mapped_object.py:199: AttributeError
Failed tickets/ticket49039_test.py::test_ticket49039 9.29
topo = <lib389.topologies.TopologyMain object at 0x7fd166920550>

def test_ticket49039(topo):
"""Test "password must change" verses "password min age". Min age should not
block password update if the password was reset.
"""

# Setup SSL (for ldappasswd test)
topo.standalone.enable_tls()

# Configure password policy
try:
policy = PwPolicyManager(topo.standalone)
policy.set_global_policy(properties={'nsslapd-pwpolicy-local': 'on',
'passwordMustChange': 'on',
'passwordExp': 'on',
'passwordMaxAge': '86400000',
'passwordMinAge': '8640000',
'passwordChange': 'on'})
except ldap.LDAPError as e:
log.fatal('Failed to set password policy: ' + str(e))

# Add user, bind, and set password
try:
topo.standalone.add_s(Entry((USER_DN, {
'objectclass': 'top extensibleObject'.split(),
'uid': 'user1',
'userpassword': PASSWORD
})))
except ldap.LDAPError as e:
log.fatal('Failed to add user: error ' + e.args[0]['desc'])
assert False

# Reset password as RootDN
try:
topo.standalone.modify_s(USER_DN, [(ldap.MOD_REPLACE, 'userpassword', ensure_bytes(PASSWORD))])
except ldap.LDAPError as e:
log.fatal('Failed to bind: error ' + e.args[0]['desc'])
assert False

time.sleep(1)

# Reset password as user
try:
topo.standalone.simple_bind_s(USER_DN, PASSWORD)
except ldap.LDAPError as e:
log.fatal('Failed to bind: error ' + e.args[0]['desc'])
assert False

try:
> topo.standalone.modify_s(USER_DN, [(ldap.MOD_REPLACE, 'userpassword', ensure_bytes(PASSWORD))])

/export/tests/tickets/ticket49039_test.py:75:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

args = ('uid=user,dc=example,dc=com', [(2, 'userpassword', b'password')])
kwargs = {}
c_stack = [FrameInfo(frame=<frame at 0x7fd166e34840, file '/usr/local/lib/python3.8/site-packages/lib389/__init__.py', line 180,...93, function='_hookexec', code_context=[' return self._inner_hookexec(hook, methods, kwargs)\n'], index=0), ...]
frame = FrameInfo(frame=<frame at 0x559a4f9de410, file '/export/tests/tickets/ticket49039_test.py', line 78, code test_ticket4...[" topo.standalone.modify_s(USER_DN, [(ldap.MOD_REPLACE, 'userpassword', ensure_bytes(PASSWORD))])\n"], index=0)

def inner(*args, **kwargs):
if name in [
'add_s',
'bind_s',
'delete_s',
'modify_s',
'modrdn_s',
'rename_s',
'sasl_interactive_bind_s',
'search_s',
'search_ext_s',
'simple_bind_s',
'unbind_s',
'getEntry',
] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'):
c_stack = inspect.stack()
frame = c_stack[1]

warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. "
"Found in: %s:%s" % (name, frame.filename, frame.lineno)))
# Later, we will add a sleep here to make it even more painful.
# Finally, it will raise an exception.
elif 'escapehatch' in kwargs:
kwargs.pop('escapehatch')

if name == 'result':
objtype, data = f(*args, **kwargs)
# data is either a 2-tuple or a list of 2-tuples
# print data
if data:
if isinstance(data, tuple):
return objtype, Entry(data)
elif isinstance(data, list):
# AD sends back these search references
# if objtype == ldap.RES_SEARCH_RESULT and \
# isinstance(data[-1],tuple) and \
# not data[-1][0]:
# print "Received search reference: "
# pprint.pprint(data[-1][1])
# data.pop() # remove the last non-entry element

return objtype, [Entry(x) for x in data]
else:
raise TypeError("unknown data type %s returned by result" %
type(data))
else:
return objtype, data
elif name.startswith('add'):
# the first arg is self
# the second and third arg are the dn and the data to send
# We need to convert the Entry into the format used by
# python-ldap
ent = args[0]
if isinstance(ent, Entry):
return f(ent.dn, ent.toTupleList(), *args[2:])
else:
return f(*args, **kwargs)
else:
> return f(*args, **kwargs)

/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd16690cac0>
dn = 'uid=user,dc=example,dc=com', modlist = [(2, 'userpassword', b'password')]

def modify_s(self,dn,modlist):
> return self.modify_ext_s(dn,modlist,None,None)

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:640:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

args = ('uid=user,dc=example,dc=com', [(2, 'userpassword', b'password')], None, None)
kwargs = {}

def inner(*args, **kwargs):
if name in [
'add_s',
'bind_s',
'delete_s',
'modify_s',
'modrdn_s',
'rename_s',
'sasl_interactive_bind_s',
'search_s',
'search_ext_s',
'simple_bind_s',
'unbind_s',
'getEntry',
] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'):
c_stack = inspect.stack()
frame = c_stack[1]

warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. "
"Found in: %s:%s" % (name, frame.filename, frame.lineno)))
# Later, we will add a sleep here to make it even more painful.
# Finally, it will raise an exception.
elif 'escapehatch' in kwargs:
kwargs.pop('escapehatch')

if name == 'result':
objtype, data = f(*args, **kwargs)
# data is either a 2-tuple or a list of 2-tuples
# print data
if data:
if isinstance(data, tuple):
return objtype, Entry(data)
elif isinstance(data, list):
# AD sends back these search references
# if objtype == ldap.RES_SEARCH_RESULT and \
# isinstance(data[-1],tuple) and \
# not data[-1][0]:
# print "Received search reference: "
# pprint.pprint(data[-1][1])
# data.pop() # remove the last non-entry element

return objtype, [Entry(x) for x in data]
else:
raise TypeError("unknown data type %s returned by result" %
type(data))
else:
return objtype, data
elif name.startswith('add'):
# the first arg is self
# the second and third arg are the dn and the data to send
# We need to convert the Entry into the format used by
# python-ldap
ent = args[0]
if isinstance(ent, Entry):
return f(ent.dn, ent.toTupleList(), *args[2:])
else:
return f(*args, **kwargs)
else:
> return f(*args, **kwargs)

/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd16690cac0>
dn = 'uid=user,dc=example,dc=com', modlist = [(2, 'userpassword', b'password')]
serverctrls = None, clientctrls = None

def modify_ext_s(self,dn,modlist,serverctrls=None,clientctrls=None):
msgid = self.modify_ext(dn,modlist,serverctrls,clientctrls)
> resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout)

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:613:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

args = (7,), kwargs = {'all': 1, 'timeout': -1}

def inner(*args, **kwargs):
if name in [
'add_s',
'bind_s',
'delete_s',
'modify_s',
'modrdn_s',
'rename_s',
'sasl_interactive_bind_s',
'search_s',
'search_ext_s',
'simple_bind_s',
'unbind_s',
'getEntry',
] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'):
c_stack = inspect.stack()
frame = c_stack[1]

warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. "
"Found in: %s:%s" % (name, frame.filename, frame.lineno)))
# Later, we will add a sleep here to make it even more painful.
# Finally, it will raise an exception.
elif 'escapehatch' in kwargs:
kwargs.pop('escapehatch')

if name == 'result':
objtype, data = f(*args, **kwargs)
# data is either a 2-tuple or a list of 2-tuples
# print data
if data:
if isinstance(data, tuple):
return objtype, Entry(data)
elif isinstance(data, list):
# AD sends back these search references
# if objtype == ldap.RES_SEARCH_RESULT and \
# isinstance(data[-1],tuple) and \
# not data[-1][0]:
# print "Received search reference: "
# pprint.pprint(data[-1][1])
# data.pop() # remove the last non-entry element

return objtype, [Entry(x) for x in data]
else:
raise TypeError("unknown data type %s returned by result" %
type(data))
else:
return objtype, data
elif name.startswith('add'):
# the first arg is self
# the second and third arg are the dn and the data to send
# We need to convert the Entry into the format used by
# python-ldap
ent = args[0]
if isinstance(ent, Entry):
return f(ent.dn, ent.toTupleList(), *args[2:])
else:
return f(*args, **kwargs)
else:
> return f(*args, **kwargs)

/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd16690cac0>, msgid = 7, all = 1
timeout = -1, resp_ctrl_classes = None

def result3(self,msgid=ldap.RES_ANY,all=1,timeout=None,resp_ctrl_classes=None):
> resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4(
msgid,all,timeout,
add_ctrls=0,add_intermediates=0,add_extop=0,
resp_ctrl_classes=resp_ctrl_classes
)

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:764:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

args = (7, 1, -1)
kwargs = {'add_ctrls': 0, 'add_extop': 0, 'add_intermediates': 0, 'resp_ctrl_classes': None}

def inner(*args, **kwargs):
if name in [
'add_s',
'bind_s',
'delete_s',
'modify_s',
'modrdn_s',
'rename_s',
'sasl_interactive_bind_s',
'search_s',
'search_ext_s',
'simple_bind_s',
'unbind_s',
'getEntry',
] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'):
c_stack = inspect.stack()
frame = c_stack[1]

warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. "
"Found in: %s:%s" % (name, frame.filename, frame.lineno)))
# Later, we will add a sleep here to make it even more painful.
# Finally, it will raise an exception.
elif 'escapehatch' in kwargs:
kwargs.pop('escapehatch')

if name == 'result':
objtype, data = f(*args, **kwargs)
# data is either a 2-tuple or a list of 2-tuples
# print data
if data:
if isinstance(data, tuple):
return objtype, Entry(data)
elif isinstance(data, list):
# AD sends back these search references
# if objtype == ldap.RES_SEARCH_RESULT and \
# isinstance(data[-1],tuple) and \
# not data[-1][0]:
# print "Received search reference: "
# pprint.pprint(data[-1][1])
# data.pop() # remove the last non-entry element

return objtype, [Entry(x) for x in data]
else:
raise TypeError("unknown data type %s returned by result" %
type(data))
else:
return objtype, data
elif name.startswith('add'):
# the first arg is self
# the second and third arg are the dn and the data to send
# We need to convert the Entry into the format used by
# python-ldap
ent = args[0]
if isinstance(ent, Entry):
return f(ent.dn, ent.toTupleList(), *args[2:])
else:
return f(*args, **kwargs)
else:
> return f(*args, **kwargs)

/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd16690cac0>, msgid = 7, all = 1
timeout = -1, add_ctrls = 0, add_intermediates = 0, add_extop = 0
resp_ctrl_classes = None

def result4(self,msgid=ldap.RES_ANY,all=1,timeout=None,add_ctrls=0,add_intermediates=0,add_extop=0,resp_ctrl_classes=None):
if timeout is None:
timeout = self.timeout
> ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop)

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:774:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

args = (<built-in method result4 of LDAP object at 0x7fd166920e40>, 7, 1, -1, 0, 0, ...)
kwargs = {}

def inner(*args, **kwargs):
if name in [
'add_s',
'bind_s',
'delete_s',
'modify_s',
'modrdn_s',
'rename_s',
'sasl_interactive_bind_s',
'search_s',
'search_ext_s',
'simple_bind_s',
'unbind_s',
'getEntry',
] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'):
c_stack = inspect.stack()
frame = c_stack[1]

warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. "
"Found in: %s:%s" % (name, frame.filename, frame.lineno)))
# Later, we will add a sleep here to make it even more painful.
# Finally, it will raise an exception.
elif 'escapehatch' in kwargs:
kwargs.pop('escapehatch')

if name == 'result':
objtype, data = f(*args, **kwargs)
# data is either a 2-tuple or a list of 2-tuples
# print data
if data:
if isinstance(data, tuple):
return objtype, Entry(data)
elif isinstance(data, list):
# AD sends back these search references
# if objtype == ldap.RES_SEARCH_RESULT and \
# isinstance(data[-1],tuple) and \
# not data[-1][0]:
# print "Received search reference: "
# pprint.pprint(data[-1][1])
# data.pop() # remove the last non-entry element

return objtype, [Entry(x) for x in data]
else:
raise TypeError("unknown data type %s returned by result" %
type(data))
else:
return objtype, data
elif name.startswith('add'):
# the first arg is self
# the second and third arg are the dn and the data to send
# We need to convert the Entry into the format used by
# python-ldap
ent = args[0]
if isinstance(ent, Entry):
return f(ent.dn, ent.toTupleList(), *args[2:])
else:
return f(*args, **kwargs)
else:
> return f(*args, **kwargs)

/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd16690cac0>
func = <built-in method result4 of LDAP object at 0x7fd166920e40>
args = (7, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None
exc_type = None, exc_value = None, exc_traceback = None

def _ldap_call(self,func,*args,**kwargs):
"""
Wrapper method mainly for serializing calls into OpenLDAP libs
and trace logs
"""
self._ldap_object_lock.acquire()
if __debug__:
if self._trace_level>=1:
self._trace_file.write('*** %s %s - %s\n%s\n' % (
repr(self),
self._uri,
'.'.join((self.__class__.__name__,func.__name__)),
pprint.pformat((args,kwargs))
))
if self._trace_level>=9:
traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file)
diagnostic_message_success = None
try:
try:
result = func(*args,**kwargs)
if __debug__ and self._trace_level>=2:
if func.__name__!="unbind_ext":
diagnostic_message_success = self._l.get_option(ldap.OPT_DIAGNOSTIC_MESSAGE)
finally:
self._ldap_object_lock.release()
except LDAPError as e:
exc_type,exc_value,exc_traceback = sys.exc_info()
try:
if 'info' not in e.args[0] and 'errno' in e.args[0]:
e.args[0]['info'] = strerror(e.args[0]['errno'])
except IndexError:
pass
if __debug__ and self._trace_level>=2:
self._trace_file.write('=> LDAPError - %s: %s\n' % (e.__class__.__name__,str(e)))
try:
> reraise(exc_type, exc_value, exc_traceback)

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:340:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

exc_type = <class 'ldap.INSUFFICIENT_ACCESS'>
exc_value = INSUFFICIENT_ACCESS({'msgtype': 103, 'msgid': 7, 'result': 50, 'desc': 'Insufficient access', 'ctrls': [], 'info': "Insufficient 'write' privilege to the 'userPassword' attribute of entry 'uid=user,dc=example,dc=com'.\n"})
exc_traceback = <traceback object at 0x7fd167296dc0>

def reraise(exc_type, exc_value, exc_traceback):
"""Re-raise an exception given information from sys.exc_info()

Note that unlike six.reraise, this does not support replacing the
traceback. All arguments must come from a single sys.exc_info() call.
"""
# In Python 3, all exception info is contained in one object.
> raise exc_value

/usr/local/lib64/python3.8/site-packages/ldap/compat.py:46:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd16690cac0>
func = <built-in method result4 of LDAP object at 0x7fd166920e40>
args = (7, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None
exc_type = None, exc_value = None, exc_traceback = None

def _ldap_call(self,func,*args,**kwargs):
"""
Wrapper method mainly for serializing calls into OpenLDAP libs
and trace logs
"""
self._ldap_object_lock.acquire()
if __debug__:
if self._trace_level>=1:
self._trace_file.write('*** %s %s - %s\n%s\n' % (
repr(self),
self._uri,
'.'.join((self.__class__.__name__,func.__name__)),
pprint.pformat((args,kwargs))
))
if self._trace_level>=9:
traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file)
diagnostic_message_success = None
try:
try:
> result = func(*args,**kwargs)
E ldap.INSUFFICIENT_ACCESS: {'msgtype': 103, 'msgid': 7, 'result': 50, 'desc': 'Insufficient access', 'ctrls': [], 'info': "Insufficient 'write' privilege to the 'userPassword' attribute of entry 'uid=user,dc=example,dc=com'.\n"}

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:324: INSUFFICIENT_ACCESS

During handling of the above exception, another exception occurred:

topo = <lib389.topologies.TopologyMain object at 0x7fd166920550>

def test_ticket49039(topo):
"""Test "password must change" verses "password min age". Min age should not
block password update if the password was reset.
"""

# Setup SSL (for ldappasswd test)
topo.standalone.enable_tls()

# Configure password policy
try:
policy = PwPolicyManager(topo.standalone)
policy.set_global_policy(properties={'nsslapd-pwpolicy-local': 'on',
'passwordMustChange': 'on',
'passwordExp': 'on',
'passwordMaxAge': '86400000',
'passwordMinAge': '8640000',
'passwordChange': 'on'})
except ldap.LDAPError as e:
log.fatal('Failed to set password policy: ' + str(e))

# Add user, bind, and set password
try:
topo.standalone.add_s(Entry((USER_DN, {
'objectclass': 'top extensibleObject'.split(),
'uid': 'user1',
'userpassword': PASSWORD
})))
except ldap.LDAPError as e:
log.fatal('Failed to add user: error ' + e.args[0]['desc'])
assert False

# Reset password as RootDN
try:
topo.standalone.modify_s(USER_DN, [(ldap.MOD_REPLACE, 'userpassword', ensure_bytes(PASSWORD))])
except ldap.LDAPError as e:
log.fatal('Failed to bind: error ' + e.args[0]['desc'])
assert False

time.sleep(1)

# Reset password as user
try:
topo.standalone.simple_bind_s(USER_DN, PASSWORD)
except ldap.LDAPError as e:
log.fatal('Failed to bind: error ' + e.args[0]['desc'])
assert False

try:
topo.standalone.modify_s(USER_DN, [(ldap.MOD_REPLACE, 'userpassword', ensure_bytes(PASSWORD))])
except ldap.LDAPError as e:
log.fatal('Failed to change password: error ' + e.args[0]['desc'])
> assert False
E assert False

/export/tests/tickets/ticket49039_test.py:78: AssertionError
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
CRITICAL tests.tickets.ticket49039_test:ticket49039_test.py:77 Failed to change password: error Insufficient access
Failed tickets/ticket49072_test.py::test_ticket49072_basedn 4.47
topo = <lib389.topologies.TopologyMain object at 0x7fd166b0a760>

def test_ticket49072_basedn(topo):
"""memberOf fixup task does not validate args

:id: dce9b898-119d-42b8-a236-1130e59bfe18
:feature: memberOf
:setup: Standalone instance, with memberOf plugin
:steps: 1. Run fixup-memberOf.pl with invalid DN entry
2. Check if error log reports "Failed to get be backend"
:expectedresults: Fixup-memberOf.pl task should complete, but errors logged.
"""

log.info("Ticket 49072 memberof fixup task with invalid basedn...")
topo.standalone.plugins.enable(name=PLUGIN_MEMBER_OF)
topo.standalone.restart(timeout=10)

if ds_is_older('1.3'):
inst_dir = topo.standalone.get_inst_dir()
memof_task = os.path.join(inst_dir, FIXUP_MEMOF)
try:
output = subprocess.check_output([memof_task, '-D', DN_DM, '-w', PASSWORD, '-b', TEST_BASEDN, '-f', FILTER])
except subprocess.CalledProcessError as err:
output = err.output
else:
sbin_dir = topo.standalone.get_sbin_dir()
memof_task = os.path.join(sbin_dir, FIXUP_MEMOF)
try:
> output = subprocess.check_output(
[memof_task, '-D', DN_DM, '-w', PASSWORD, '-b', TEST_BASEDN, '-Z', SERVERID_STANDALONE, '-f', FILTER])

/export/tests/tickets/ticket49072_test.py:55:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/usr/lib64/python3.8/subprocess.py:411: in check_output
return run(*popenargs, stdout=PIPE, timeout=timeout, check=True,
/usr/lib64/python3.8/subprocess.py:489: in run
with Popen(*popenargs, **kwargs) as process:
/usr/lib64/python3.8/subprocess.py:854: in __init__
self._execute_child(args, executable, preexec_fn, close_fds,
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <subprocess.Popen object at 0x7fd166cc87c0>
args = ['/usr/sbin/fixup-memberof.pl', '-D', 'cn=Directory Manager', '-w', 'password', '-b', ...]
executable = b'/usr/sbin/fixup-memberof.pl', preexec_fn = None, close_fds = True
pass_fds = (), cwd = None, env = None, startupinfo = None, creationflags = 0
shell = False, p2cread = -1, p2cwrite = -1, c2pread = 40, c2pwrite = 42
errread = -1, errwrite = -1, restore_signals = True, start_new_session = False

def _execute_child(self, args, executable, preexec_fn, close_fds,
pass_fds, cwd, env,
startupinfo, creationflags, shell,
p2cread, p2cwrite,
c2pread, c2pwrite,
errread, errwrite,
restore_signals, start_new_session):
"""Execute program (POSIX version)"""

if isinstance(args, (str, bytes)):
args = [args]
elif isinstance(args, os.PathLike):
if shell:
raise TypeError('path-like args is not allowed when '
'shell is true')
args = [args]
else:
args = list(args)

if shell:
# On Android the default shell is at '/system/bin/sh'.
unix_shell = ('/system/bin/sh' if
hasattr(sys, 'getandroidapilevel') else '/bin/sh')
args = [unix_shell, "-c"] + args
if executable:
args[0] = executable

if executable is None:
executable = args[0]

sys.audit("subprocess.Popen", executable, args, cwd, env)

if (_USE_POSIX_SPAWN
and os.path.dirname(executable)
and preexec_fn is None
and not close_fds
and not pass_fds
and cwd is None
and (p2cread == -1 or p2cread > 2)
and (c2pwrite == -1 or c2pwrite > 2)
and (errwrite == -1 or errwrite > 2)
and not start_new_session):
self._posix_spawn(args, executable, env, restore_signals,
p2cread, p2cwrite,
c2pread, c2pwrite,
errread, errwrite)
return

orig_executable = executable

# For transferring possible exec failure from child to parent.
# Data format: "exception name:hex errno:description"
# Pickle is not used; it is complex and involves memory allocation.
errpipe_read, errpipe_write = os.pipe()
# errpipe_write must not be in the standard io 0, 1, or 2 fd range.
low_fds_to_close = []
while errpipe_write < 3:
low_fds_to_close.append(errpipe_write)
errpipe_write = os.dup(errpipe_write)
for low_fd in low_fds_to_close:
os.close(low_fd)
try:
try:
# We must avoid complex work that could involve
# malloc or free in the child process to avoid
# potential deadlocks, thus we do all this here.
# and pass it to fork_exec()

if env is not None:
env_list = []
for k, v in env.items():
k = os.fsencode(k)
if b'=' in k:
raise ValueError("illegal environment variable name")
env_list.append(k + b'=' + os.fsencode(v))
else:
env_list = None # Use execv instead of execve.
executable = os.fsencode(executable)
if os.path.dirname(executable):
executable_list = (executable,)
else:
# This matches the behavior of os._execvpe().
executable_list = tuple(
os.path.join(os.fsencode(dir), executable)
for dir in os.get_exec_path(env))
fds_to_keep = set(pass_fds)
fds_to_keep.add(errpipe_write)
self.pid = _posixsubprocess.fork_exec(
args, executable_list,
close_fds, tuple(sorted(map(int, fds_to_keep))),
cwd, env_list,
p2cread, p2cwrite, c2pread, c2pwrite,
errread, errwrite,
errpipe_read, errpipe_write,
restore_signals, start_new_session, preexec_fn)
self._child_created = True
finally:
# be sure the FD is closed no matter what
os.close(errpipe_write)

self._close_pipe_fds(p2cread, p2cwrite,
c2pread, c2pwrite,
errread, errwrite)

# Wait for exec to fail or succeed; possibly raising an
# exception (limited in size)
errpipe_data = bytearray()
while True:
part = os.read(errpipe_read, 50000)
errpipe_data += part
if not part or len(errpipe_data) > 50000:
break
finally:
# be sure the FD is closed no matter what
os.close(errpipe_read)

if errpipe_data:
try:
pid, sts = os.waitpid(self.pid, 0)
if pid == self.pid:
self._handle_exitstatus(sts)
else:
self.returncode = sys.maxsize
except ChildProcessError:
pass

try:
exception_name, hex_errno, err_msg = (
errpipe_data.split(b':', 2))
# The encoding here should match the encoding
# written in by the subprocess implementations
# like _posixsubprocess
err_msg = err_msg.decode()
except ValueError:
exception_name = b'SubprocessError'
hex_errno = b'0'
err_msg = 'Bad exception data from child: {!r}'.format(
bytes(errpipe_data))
child_exception_type = getattr(
builtins, exception_name.decode('ascii'),
SubprocessError)
if issubclass(child_exception_type, OSError) and hex_errno:
errno_num = int(hex_errno, 16)
child_exec_never_called = (err_msg == "noexec")
if child_exec_never_called:
err_msg = ""
# The error must be from chdir(cwd).
err_filename = cwd
else:
err_filename = orig_executable
if errno_num != 0:
err_msg = os.strerror(errno_num)
> raise child_exception_type(errno_num, err_msg, err_filename)
E FileNotFoundError: [Errno 2] No such file or directory: '/usr/sbin/fixup-memberof.pl'

/usr/lib64/python3.8/subprocess.py:1702: FileNotFoundError
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket49072_test:ticket49072_test.py:40 Ticket 49072 memberof fixup task with invalid basedn...
Failed tickets/ticket49072_test.py::test_ticket49072_filter 10.03
topo = <lib389.topologies.TopologyMain object at 0x7fd166b0a760>

def test_ticket49072_filter(topo):
"""memberOf fixup task does not validate args

:id: dde9e893-119d-42c8-a236-1190e56bfe98
:feature: memberOf
:setup: Standalone instance, with memberOf plugin
:steps: 1. Run fixup-memberOf.pl with invalid filter
2. Check if error log reports "Bad search filter"
:expectedresults: Fixup-memberOf.pl task should complete, but errors logged.
"""
log.info("Ticket 49072 memberof fixup task with invalid filter...")
log.info('Wait for 10 secs and check if task is completed')
time.sleep(10)
task_memof = 'cn=memberOf task,cn=tasks,cn=config'
if topo.standalone.search_s(task_memof, ldap.SCOPE_SUBTREE, 'cn=memberOf_fixup*', ['dn:']):
log.info('memberof task is still running, wait for +10 secs')
time.sleep(10)

if ds_is_older('1.3'):
inst_dir = topo.standalone.get_inst_dir()
memof_task = os.path.join(inst_dir, FIXUP_MEMOF)
try:
output = subprocess.check_output([memof_task, '-D', DN_DM, '-w', PASSWORD, '-b', SUFFIX, '-f', TEST_FILTER])
except subprocess.CalledProcessError as err:
output = err.output
else:
sbin_dir = topo.standalone.get_sbin_dir()
memof_task = os.path.join(sbin_dir, FIXUP_MEMOF)
try:
> output = subprocess.check_output(
[memof_task, '-D', DN_DM, '-w', PASSWORD, '-b', SUFFIX, '-Z', SERVERID_STANDALONE, '-f', TEST_FILTER])

/export/tests/tickets/ticket49072_test.py:96:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/usr/lib64/python3.8/subprocess.py:411: in check_output
return run(*popenargs, stdout=PIPE, timeout=timeout, check=True,
/usr/lib64/python3.8/subprocess.py:489: in run
with Popen(*popenargs, **kwargs) as process:
/usr/lib64/python3.8/subprocess.py:854: in __init__
self._execute_child(args, executable, preexec_fn, close_fds,
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <subprocess.Popen object at 0x7fd165dfe310>
args = ['/usr/sbin/fixup-memberof.pl', '-D', 'cn=Directory Manager', '-w', 'password', '-b', ...]
executable = b'/usr/sbin/fixup-memberof.pl', preexec_fn = None, close_fds = True
pass_fds = (), cwd = None, env = None, startupinfo = None, creationflags = 0
shell = False, p2cread = -1, p2cwrite = -1, c2pread = 14, c2pwrite = 17
errread = -1, errwrite = -1, restore_signals = True, start_new_session = False

def _execute_child(self, args, executable, preexec_fn, close_fds,
pass_fds, cwd, env,
startupinfo, creationflags, shell,
p2cread, p2cwrite,
c2pread, c2pwrite,
errread, errwrite,
restore_signals, start_new_session):
"""Execute program (POSIX version)"""

if isinstance(args, (str, bytes)):
args = [args]
elif isinstance(args, os.PathLike):
if shell:
raise TypeError('path-like args is not allowed when '
'shell is true')
args = [args]
else:
args = list(args)

if shell:
# On Android the default shell is at '/system/bin/sh'.
unix_shell = ('/system/bin/sh' if
hasattr(sys, 'getandroidapilevel') else '/bin/sh')
args = [unix_shell, "-c"] + args
if executable:
args[0] = executable

if executable is None:
executable = args[0]

sys.audit("subprocess.Popen", executable, args, cwd, env)

if (_USE_POSIX_SPAWN
and os.path.dirname(executable)
and preexec_fn is None
and not close_fds
and not pass_fds
and cwd is None
and (p2cread == -1 or p2cread > 2)
and (c2pwrite == -1 or c2pwrite > 2)
and (errwrite == -1 or errwrite > 2)
and not start_new_session):
self._posix_spawn(args, executable, env, restore_signals,
p2cread, p2cwrite,
c2pread, c2pwrite,
errread, errwrite)
return

orig_executable = executable

# For transferring possible exec failure from child to parent.
# Data format: "exception name:hex errno:description"
# Pickle is not used; it is complex and involves memory allocation.
errpipe_read, errpipe_write = os.pipe()
# errpipe_write must not be in the standard io 0, 1, or 2 fd range.
low_fds_to_close = []
while errpipe_write < 3:
low_fds_to_close.append(errpipe_write)
errpipe_write = os.dup(errpipe_write)
for low_fd in low_fds_to_close:
os.close(low_fd)
try:
try:
# We must avoid complex work that could involve
# malloc or free in the child process to avoid
# potential deadlocks, thus we do all this here.
# and pass it to fork_exec()

if env is not None:
env_list = []
for k, v in env.items():
k = os.fsencode(k)
if b'=' in k:
raise ValueError("illegal environment variable name")
env_list.append(k + b'=' + os.fsencode(v))
else:
env_list = None # Use execv instead of execve.
executable = os.fsencode(executable)
if os.path.dirname(executable):
executable_list = (executable,)
else:
# This matches the behavior of os._execvpe().
executable_list = tuple(
os.path.join(os.fsencode(dir), executable)
for dir in os.get_exec_path(env))
fds_to_keep = set(pass_fds)
fds_to_keep.add(errpipe_write)
self.pid = _posixsubprocess.fork_exec(
args, executable_list,
close_fds, tuple(sorted(map(int, fds_to_keep))),
cwd, env_list,
p2cread, p2cwrite, c2pread, c2pwrite,
errread, errwrite,
errpipe_read, errpipe_write,
restore_signals, start_new_session, preexec_fn)
self._child_created = True
finally:
# be sure the FD is closed no matter what
os.close(errpipe_write)

self._close_pipe_fds(p2cread, p2cwrite,
c2pread, c2pwrite,
errread, errwrite)

# Wait for exec to fail or succeed; possibly raising an
# exception (limited in size)
errpipe_data = bytearray()
while True:
part = os.read(errpipe_read, 50000)
errpipe_data += part
if not part or len(errpipe_data) > 50000:
break
finally:
# be sure the FD is closed no matter what
os.close(errpipe_read)

if errpipe_data:
try:
pid, sts = os.waitpid(self.pid, 0)
if pid == self.pid:
self._handle_exitstatus(sts)
else:
self.returncode = sys.maxsize
except ChildProcessError:
pass

try:
exception_name, hex_errno, err_msg = (
errpipe_data.split(b':', 2))
# The encoding here should match the encoding
# written in by the subprocess implementations
# like _posixsubprocess
err_msg = err_msg.decode()
except ValueError:
exception_name = b'SubprocessError'
hex_errno = b'0'
err_msg = 'Bad exception data from child: {!r}'.format(
bytes(errpipe_data))
child_exception_type = getattr(
builtins, exception_name.decode('ascii'),
SubprocessError)
if issubclass(child_exception_type, OSError) and hex_errno:
errno_num = int(hex_errno, 16)
child_exec_never_called = (err_msg == "noexec")
if child_exec_never_called:
err_msg = ""
# The error must be from chdir(cwd).
err_filename = cwd
else:
err_filename = orig_executable
if errno_num != 0:
err_msg = os.strerror(errno_num)
> raise child_exception_type(errno_num, err_msg, err_filename)
E FileNotFoundError: [Errno 2] No such file or directory: '/usr/sbin/fixup-memberof.pl'

/usr/lib64/python3.8/subprocess.py:1702: FileNotFoundError
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket49072_test:ticket49072_test.py:77 Ticket 49072 memberof fixup task with invalid filter... INFO  tests.tickets.ticket49072_test:ticket49072_test.py:78 Wait for 10 secs and check if task is completed
Failed tickets/ticket49073_test.py::test_ticket49073 7.98
topology_m2 = <lib389.topologies.TopologyMain object at 0x7fd1669468e0>

def test_ticket49073(topology_m2):
"""Write your replication test here.

To access each DirSrv instance use: topology_m2.ms["master1"], topology_m2.ms["master2"],
..., topology_m2.hub1, ..., topology_m2.consumer1,...

Also, if you need any testcase initialization,
please, write additional fixture for that(include finalizer).
"""
topology_m2.ms["master1"].plugins.enable(name=PLUGIN_MEMBER_OF)
topology_m2.ms["master1"].restart(timeout=10)
topology_m2.ms["master2"].plugins.enable(name=PLUGIN_MEMBER_OF)
topology_m2.ms["master2"].restart(timeout=10)

# Configure fractional to prevent total init to send memberof
ents = topology_m2.ms["master1"].agreement.list(suffix=SUFFIX)
assert len(ents) == 1
log.info('update %s to add nsDS5ReplicatedAttributeListTotal' % ents[0].dn)
> topology_m2.ms["master1"].modify_s(ents[0].dn,
[(ldap.MOD_REPLACE,
'nsDS5ReplicatedAttributeListTotal',
'(objectclass=*) $ EXCLUDE '),
(ldap.MOD_REPLACE,
'nsDS5ReplicatedAttributeList',
'(objectclass=*) $ EXCLUDE memberOf')])

/export/tests/tickets/ticket49073_test.py:97:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:640: in modify_s
return self.modify_ext_s(dn,modlist,None,None)
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:612: in modify_ext_s
msgid = self.modify_ext(dn,modlist,serverctrls,clientctrls)
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:609: in modify_ext
return self._ldap_call(self._l.modify_ext,dn,modlist,RequestControlTuples(serverctrls),RequestControlTuples(clientctrls))
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd166979670>
func = <built-in method modify_ext of LDAP object at 0x7fd166c49ae0>
args = ('cn=002,cn=replica,cn=dc\\3Dexample\\2Cdc\\3Dcom,cn=mapping tree,cn=config', [(2, 'nsDS5ReplicatedAttributeListTotal', '(objectclass=*) $ EXCLUDE '), (2, 'nsDS5ReplicatedAttributeList', '(objectclass=*) $ EXCLUDE memberOf')], None, None)
kwargs = {}, diagnostic_message_success = None

def _ldap_call(self,func,*args,**kwargs):
"""
Wrapper method mainly for serializing calls into OpenLDAP libs
and trace logs
"""
self._ldap_object_lock.acquire()
if __debug__:
if self._trace_level>=1:
self._trace_file.write('*** %s %s - %s\n%s\n' % (
repr(self),
self._uri,
'.'.join((self.__class__.__name__,func.__name__)),
pprint.pformat((args,kwargs))
))
if self._trace_level>=9:
traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file)
diagnostic_message_success = None
try:
try:
> result = func(*args,**kwargs)
E TypeError: ('Tuple_to_LDAPMod(): expected a byte string in the list', '(')

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:324: TypeError
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect bde6db26-6065-449b-aec5-12de83e3d7bc / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working 
(expect f2b3e507-91eb-4f52-b57c-774a5ab0e209 / got description=bde6db26-6065-449b-aec5-12de83e3d7bc) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket49073_test:ticket49073_test.py:96 update cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config to add nsDS5ReplicatedAttributeListTotal
Failed tickets/ticket49104_test.py::test_ticket49104_setup 0.00
topology_st = <lib389.topologies.TopologyMain object at 0x7fd166b1b6d0>

def test_ticket49104_setup(topology_st):
"""
Generate an ldif file having 10K entries and import it.
"""
# Generate a test ldif (100k entries)
ldif_dir = topology_st.standalone.get_ldif_dir()
import_ldif = ldif_dir + '/49104.ldif'
try:
> topology_st.standalone.buildLDIF(100000, import_ldif)

/export/tests/tickets/ticket49104_test.py:30:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd166b1b850>, num = 100000
ldif_file = '/var/lib/dirsrv/slapd-standalone1/ldif/49104.ldif'
suffix = 'dc=example,dc=com'

def buildLDIF(self, num, ldif_file, suffix='dc=example,dc=com'):
"""Generate a simple ldif file using the dbgen.pl script, and set the
ownership and permissions to match the user that the server runs as.

@param num - number of entries to create
@param ldif_file - ldif file name(including the path)
@suffix - DN of the parent entry in the ldif file
@return - nothing
@raise - OSError
"""
> raise Exception("Perl tools disabled on this system. Try dbgen py module.")
E Exception: Perl tools disabled on this system. Try dbgen py module.

/usr/local/lib/python3.8/site-packages/lib389/__init__.py:3236: Exception
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Failed tickets/ticket49192_test.py::test_ticket49192 0.00
topo = <lib389.topologies.TopologyMain object at 0x7fd16680e6d0>

def test_ticket49192(topo):
"""Trigger deadlock when removing suffix
"""

#
# Create a second suffix/backend
#
log.info('Creating second backend...')
> topo.standalone.backends.create(None, properties={
BACKEND_NAME: "Second_Backend",
'suffix': "o=hang.com",
})

/export/tests/tickets/ticket49192_test.py:35:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/usr/local/lib/python3.8/site-packages/lib389/_mapped_object.py:1169: in create
return co.create(rdn, properties, self._basedn)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.backend.Backend object at 0x7fd16680e400>, dn = None
properties = {'name': 'Second_Backend', 'suffix': 'o=hang.com'}
basedn = 'cn=ldbm database,cn=plugins,cn=config'

def create(self, dn=None, properties=None, basedn=DN_LDBM):
"""Add a new backend entry, create mapping tree,
and, if requested, sample entries

:param dn: DN of the new entry
:type dn: str
:param properties: Attributes and parameters for the new entry
:type properties: dict
:param basedn: Base DN of the new entry
:type basedn: str

:returns: DSLdapObject of the created entry
"""

sample_entries = False
parent_suffix = False

# normalize suffix (remove spaces between comps)
if dn is not None:
dn_comps = ldap.dn.explode_dn(dn.lower())
dn = ",".join(dn_comps)

if properties is not None:
> suffix_dn = properties['nsslapd-suffix'].lower()
E KeyError: 'nsslapd-suffix'

/usr/local/lib/python3.8/site-packages/lib389/backend.py:609: KeyError
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket49192_test:ticket49192_test.py:34 Creating second backend...
Failed tickets/ticket49287_test.py::test_ticket49287 12.24
self = <lib389.mappingTree.MappingTreeLegacy object at 0x7fd1668cf370>
suffix = 'dc=test,dc=com', bename = 'test', parent = None

def create(self, suffix=None, bename=None, parent=None):
'''
Create a mapping tree entry (under "cn=mapping tree,cn=config"),
for the 'suffix' and that is stored in 'bename' backend.
'bename' backend must exist before creating the mapping tree entry.

If a 'parent' is provided that means that we are creating a
sub-suffix mapping tree.

@param suffix - suffix mapped by this mapping tree entry. It will
be the common name ('cn') of the entry
@param benamebase - backend common name (e.g. 'userRoot')
@param parent - if provided is a parent suffix of 'suffix'

@return DN of the mapping tree entry

@raise ldap.NO_SUCH_OBJECT - if the backend entry or parent mapping
tree does not exist
ValueError - if missing a parameter,

'''
# Check suffix is provided
if not suffix:
raise ValueError("suffix is mandatory")
else:
nsuffix = normalizeDN(suffix)

# Check backend name is provided
if not bename:
raise ValueError("backend name is mandatory")

# Check that if the parent suffix is provided then
# it exists a mapping tree for it
if parent:
nparent = normalizeDN(parent)
filt = suffixfilt(parent)
try:
entry = self.conn.getEntry(DN_MAPPING_TREE, ldap.SCOPE_SUBTREE,
filt)
pass
except NoSuchEntryError:
raise ValueError("parent suffix has no mapping tree")
else:
nparent = ""

# Check if suffix exists, return
filt = suffixfilt(suffix)
try:
entry = self.conn.getEntry(DN_MAPPING_TREE, ldap.SCOPE_SUBTREE,
filt)
return entry
except ldap.NO_SUCH_OBJECT:
entry = None

#
# Now start the real work
#

# fix me when we can actually used escaped DNs
dn = ','.join(('cn="%s"' % nsuffix, DN_MAPPING_TREE))
entry = Entry(dn)
entry.update({
'objectclass': ['top', 'extensibleObject', MT_OBJECTCLASS_VALUE],
'nsslapd-state': 'backend',
# the value in the dn has to be DN escaped
# internal code will add the quoted value - unquoted value is
# useful for searching.
MT_PROPNAME_TO_ATTRNAME[MT_SUFFIX]: nsuffix,
MT_PROPNAME_TO_ATTRNAME[MT_BACKEND]: bename
})

# possibly add the parent
if parent:
entry.setValues(MT_PROPNAME_TO_ATTRNAME[MT_PARENT_SUFFIX], nparent)

try:
self.log.debug("Creating entry: %s", entry.dn)
self.log.info("Entry %r", entry)
> self.conn.add_s(entry)

/usr/local/lib/python3.8/site-packages/lib389/mappingTree.py:155:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

args = (dn: cn="dc=test,dc=com",cn=mapping tree,cn=config
cn: dc=test,dc=com
nsslapd-backend: test
nsslapd-state: backend
objectclass: top
objectclass: extensibleObject
objectclass: nsMappingTree

,)
kwargs = {}
c_stack = [FrameInfo(frame=<frame at 0x7fd166800640, file '/usr/local/lib/python3.8/site-packages/lib389/__init__.py', line 176,...neno=187, function='_multicall', code_context=[' res = hook_impl.function(*args)\n'], index=0), ...]
frame = FrameInfo(frame=<frame at 0x559a4f491a90, file '/usr/local/lib/python3.8/site-packages/lib389/mappingTree.py', line 15.../lib389/mappingTree.py', lineno=155, function='create', code_context=[' self.conn.add_s(entry)\n'], index=0)
ent = dn: cn="dc=test,dc=com",cn=mapping tree,cn=config
cn: dc=test,dc=com
nsslapd-backend: test
nsslapd-state: backend
objectclass: top
objectclass: extensibleObject
objectclass: nsMappingTree



def inner(*args, **kwargs):
if name in [
'add_s',
'bind_s',
'delete_s',
'modify_s',
'modrdn_s',
'rename_s',
'sasl_interactive_bind_s',
'search_s',
'search_ext_s',
'simple_bind_s',
'unbind_s',
'getEntry',
] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'):
c_stack = inspect.stack()
frame = c_stack[1]

warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. "
"Found in: %s:%s" % (name, frame.filename, frame.lineno)))
# Later, we will add a sleep here to make it even more painful.
# Finally, it will raise an exception.
elif 'escapehatch' in kwargs:
kwargs.pop('escapehatch')

if name == 'result':
objtype, data = f(*args, **kwargs)
# data is either a 2-tuple or a list of 2-tuples
# print data
if data:
if isinstance(data, tuple):
return objtype, Entry(data)
elif isinstance(data, list):
# AD sends back these search references
# if objtype == ldap.RES_SEARCH_RESULT and \
# isinstance(data[-1],tuple) and \
# not data[-1][0]:
# print "Received search reference: "
# pprint.pprint(data[-1][1])
# data.pop() # remove the last non-entry element

return objtype, [Entry(x) for x in data]
else:
raise TypeError("unknown data type %s returned by result" %
type(data))
else:
return objtype, data
elif name.startswith('add'):
# the first arg is self
# the second and third arg are the dn and the data to send
# We need to convert the Entry into the format used by
# python-ldap
ent = args[0]
if isinstance(ent, Entry):
> return f(ent.dn, ent.toTupleList(), *args[2:])

/usr/local/lib/python3.8/site-packages/lib389/__init__.py:176:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd1669bfdf0>
dn = 'cn="dc=test,dc=com",cn=mapping tree,cn=config'
modlist = [('objectclass', [b'top', b'extensibleObject', b'nsMappingTree']), ('nsslapd-state', [b'backend']), ('cn', [b'dc=test,dc=com']), ('nsslapd-backend', [b'test'])]

def add_s(self,dn,modlist):
> return self.add_ext_s(dn,modlist,None,None)

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:439:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

args = ('cn="dc=test,dc=com",cn=mapping tree,cn=config', [('objectclass', [b'top', b'extensibleObject', b'nsMappingTree']), ('nsslapd-state', [b'backend']), ('cn', [b'dc=test,dc=com']), ('nsslapd-backend', [b'test'])], None, None)
kwargs = {}, ent = 'cn="dc=test,dc=com",cn=mapping tree,cn=config'

def inner(*args, **kwargs):
if name in [
'add_s',
'bind_s',
'delete_s',
'modify_s',
'modrdn_s',
'rename_s',
'sasl_interactive_bind_s',
'search_s',
'search_ext_s',
'simple_bind_s',
'unbind_s',
'getEntry',
] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'):
c_stack = inspect.stack()
frame = c_stack[1]

warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. "
"Found in: %s:%s" % (name, frame.filename, frame.lineno)))
# Later, we will add a sleep here to make it even more painful.
# Finally, it will raise an exception.
elif 'escapehatch' in kwargs:
kwargs.pop('escapehatch')

if name == 'result':
objtype, data = f(*args, **kwargs)
# data is either a 2-tuple or a list of 2-tuples
# print data
if data:
if isinstance(data, tuple):
return objtype, Entry(data)
elif isinstance(data, list):
# AD sends back these search references
# if objtype == ldap.RES_SEARCH_RESULT and \
# isinstance(data[-1],tuple) and \
# not data[-1][0]:
# print "Received search reference: "
# pprint.pprint(data[-1][1])
# data.pop() # remove the last non-entry element

return objtype, [Entry(x) for x in data]
else:
raise TypeError("unknown data type %s returned by result" %
type(data))
else:
return objtype, data
elif name.startswith('add'):
# the first arg is self
# the second and third arg are the dn and the data to send
# We need to convert the Entry into the format used by
# python-ldap
ent = args[0]
if isinstance(ent, Entry):
return f(ent.dn, ent.toTupleList(), *args[2:])
else:
> return f(*args, **kwargs)

/usr/local/lib/python3.8/site-packages/lib389/__init__.py:178:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd1669bfdf0>
dn = 'cn="dc=test,dc=com",cn=mapping tree,cn=config'
modlist = [('objectclass', [b'top', b'extensibleObject', b'nsMappingTree']), ('nsslapd-state', [b'backend']), ('cn', [b'dc=test,dc=com']), ('nsslapd-backend', [b'test'])]
serverctrls = None, clientctrls = None

def add_ext_s(self,dn,modlist,serverctrls=None,clientctrls=None):
msgid = self.add_ext(dn,modlist,serverctrls,clientctrls)
> resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout)

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:425:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

args = (4,), kwargs = {'all': 1, 'timeout': -1}

def inner(*args, **kwargs):
if name in [
'add_s',
'bind_s',
'delete_s',
'modify_s',
'modrdn_s',
'rename_s',
'sasl_interactive_bind_s',
'search_s',
'search_ext_s',
'simple_bind_s',
'unbind_s',
'getEntry',
] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'):
c_stack = inspect.stack()
frame = c_stack[1]

warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. "
"Found in: %s:%s" % (name, frame.filename, frame.lineno)))
# Later, we will add a sleep here to make it even more painful.
# Finally, it will raise an exception.
elif 'escapehatch' in kwargs:
kwargs.pop('escapehatch')

if name == 'result':
objtype, data = f(*args, **kwargs)
# data is either a 2-tuple or a list of 2-tuples
# print data
if data:
if isinstance(data, tuple):
return objtype, Entry(data)
elif isinstance(data, list):
# AD sends back these search references
# if objtype == ldap.RES_SEARCH_RESULT and \
# isinstance(data[-1],tuple) and \
# not data[-1][0]:
# print "Received search reference: "
# pprint.pprint(data[-1][1])
# data.pop() # remove the last non-entry element

return objtype, [Entry(x) for x in data]
else:
raise TypeError("unknown data type %s returned by result" %
type(data))
else:
return objtype, data
elif name.startswith('add'):
# the first arg is self
# the second and third arg are the dn and the data to send
# We need to convert the Entry into the format used by
# python-ldap
ent = args[0]
if isinstance(ent, Entry):
return f(ent.dn, ent.toTupleList(), *args[2:])
else:
return f(*args, **kwargs)
else:
> return f(*args, **kwargs)

/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd1669bfdf0>, msgid = 4, all = 1
timeout = -1, resp_ctrl_classes = None

def result3(self,msgid=ldap.RES_ANY,all=1,timeout=None,resp_ctrl_classes=None):
> resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4(
msgid,all,timeout,
add_ctrls=0,add_intermediates=0,add_extop=0,
resp_ctrl_classes=resp_ctrl_classes
)

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:764:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

args = (4, 1, -1)
kwargs = {'add_ctrls': 0, 'add_extop': 0, 'add_intermediates': 0, 'resp_ctrl_classes': None}

def inner(*args, **kwargs):
if name in [
'add_s',
'bind_s',
'delete_s',
'modify_s',
'modrdn_s',
'rename_s',
'sasl_interactive_bind_s',
'search_s',
'search_ext_s',
'simple_bind_s',
'unbind_s',
'getEntry',
] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'):
c_stack = inspect.stack()
frame = c_stack[1]

warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. "
"Found in: %s:%s" % (name, frame.filename, frame.lineno)))
# Later, we will add a sleep here to make it even more painful.
# Finally, it will raise an exception.
elif 'escapehatch' in kwargs:
kwargs.pop('escapehatch')

if name == 'result':
objtype, data = f(*args, **kwargs)
# data is either a 2-tuple or a list of 2-tuples
# print data
if data:
if isinstance(data, tuple):
return objtype, Entry(data)
elif isinstance(data, list):
# AD sends back these search references
# if objtype == ldap.RES_SEARCH_RESULT and \
# isinstance(data[-1],tuple) and \
# not data[-1][0]:
# print "Received search reference: "
# pprint.pprint(data[-1][1])
# data.pop() # remove the last non-entry element

return objtype, [Entry(x) for x in data]
else:
raise TypeError("unknown data type %s returned by result" %
type(data))
else:
return objtype, data
elif name.startswith('add'):
# the first arg is self
# the second and third arg are the dn and the data to send
# We need to convert the Entry into the format used by
# python-ldap
ent = args[0]
if isinstance(ent, Entry):
return f(ent.dn, ent.toTupleList(), *args[2:])
else:
return f(*args, **kwargs)
else:
> return f(*args, **kwargs)

/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd1669bfdf0>, msgid = 4, all = 1
timeout = -1, add_ctrls = 0, add_intermediates = 0, add_extop = 0
resp_ctrl_classes = None

def result4(self,msgid=ldap.RES_ANY,all=1,timeout=None,add_ctrls=0,add_intermediates=0,add_extop=0,resp_ctrl_classes=None):
if timeout is None:
timeout = self.timeout
> ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop)

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:774:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

args = (<built-in method result4 of LDAP object at 0x7fd166a11180>, 4, 1, -1, 0, 0, ...)
kwargs = {}

def inner(*args, **kwargs):
if name in [
'add_s',
'bind_s',
'delete_s',
'modify_s',
'modrdn_s',
'rename_s',
'sasl_interactive_bind_s',
'search_s',
'search_ext_s',
'simple_bind_s',
'unbind_s',
'getEntry',
] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'):
c_stack = inspect.stack()
frame = c_stack[1]

warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. "
"Found in: %s:%s" % (name, frame.filename, frame.lineno)))
# Later, we will add a sleep here to make it even more painful.
# Finally, it will raise an exception.
elif 'escapehatch' in kwargs:
kwargs.pop('escapehatch')

if name == 'result':
objtype, data = f(*args, **kwargs)
# data is either a 2-tuple or a list of 2-tuples
# print data
if data:
if isinstance(data, tuple):
return objtype, Entry(data)
elif isinstance(data, list):
# AD sends back these search references
# if objtype == ldap.RES_SEARCH_RESULT and \
# isinstance(data[-1],tuple) and \
# not data[-1][0]:
# print "Received search reference: "
# pprint.pprint(data[-1][1])
# data.pop() # remove the last non-entry element

return objtype, [Entry(x) for x in data]
else:
raise TypeError("unknown data type %s returned by result" %
type(data))
else:
return objtype, data
elif name.startswith('add'):
# the first arg is self
# the second and third arg are the dn and the data to send
# We need to convert the Entry into the format used by
# python-ldap
ent = args[0]
if isinstance(ent, Entry):
return f(ent.dn, ent.toTupleList(), *args[2:])
else:
return f(*args, **kwargs)
else:
> return f(*args, **kwargs)

/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd1669bfdf0>
func = <built-in method result4 of LDAP object at 0x7fd166a11180>
args = (4, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None
exc_type = None, exc_value = None, exc_traceback = None

def _ldap_call(self,func,*args,**kwargs):
"""
Wrapper method mainly for serializing calls into OpenLDAP libs
and trace logs
"""
self._ldap_object_lock.acquire()
if __debug__:
if self._trace_level>=1:
self._trace_file.write('*** %s %s - %s\n%s\n' % (
repr(self),
self._uri,
'.'.join((self.__class__.__name__,func.__name__)),
pprint.pformat((args,kwargs))
))
if self._trace_level>=9:
traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file)
diagnostic_message_success = None
try:
try:
result = func(*args,**kwargs)
if __debug__ and self._trace_level>=2:
if func.__name__!="unbind_ext":
diagnostic_message_success = self._l.get_option(ldap.OPT_DIAGNOSTIC_MESSAGE)
finally:
self._ldap_object_lock.release()
except LDAPError as e:
exc_type,exc_value,exc_traceback = sys.exc_info()
try:
if 'info' not in e.args[0] and 'errno' in e.args[0]:
e.args[0]['info'] = strerror(e.args[0]['errno'])
except IndexError:
pass
if __debug__ and self._trace_level>=2:
self._trace_file.write('=> LDAPError - %s: %s\n' % (e.__class__.__name__,str(e)))
try:
> reraise(exc_type, exc_value, exc_traceback)

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:340:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

exc_type = <class 'ldap.UNWILLING_TO_PERFORM'>
exc_value = UNWILLING_TO_PERFORM({'msgtype': 105, 'msgid': 4, 'result': 53, 'desc': 'Server is unwilling to perform', 'ctrls': []})
exc_traceback = <traceback object at 0x7fd1668f60c0>

def reraise(exc_type, exc_value, exc_traceback):
"""Re-raise an exception given information from sys.exc_info()

Note that unlike six.reraise, this does not support replacing the
traceback. All arguments must come from a single sys.exc_info() call.
"""
# In Python 3, all exception info is contained in one object.
> raise exc_value

/usr/local/lib64/python3.8/site-packages/ldap/compat.py:46:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd1669bfdf0>
func = <built-in method result4 of LDAP object at 0x7fd166a11180>
args = (4, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None
exc_type = None, exc_value = None, exc_traceback = None

def _ldap_call(self,func,*args,**kwargs):
"""
Wrapper method mainly for serializing calls into OpenLDAP libs
and trace logs
"""
self._ldap_object_lock.acquire()
if __debug__:
if self._trace_level>=1:
self._trace_file.write('*** %s %s - %s\n%s\n' % (
repr(self),
self._uri,
'.'.join((self.__class__.__name__,func.__name__)),
pprint.pformat((args,kwargs))
))
if self._trace_level>=9:
traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file)
diagnostic_message_success = None
try:
try:
> result = func(*args,**kwargs)
E ldap.UNWILLING_TO_PERFORM: {'msgtype': 105, 'msgid': 4, 'result': 53, 'desc': 'Server is unwilling to perform', 'ctrls': []}

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:324: UNWILLING_TO_PERFORM

During handling of the above exception, another exception occurred:

topology_m2 = <lib389.topologies.TopologyMain object at 0x7fd1669bf520>

def test_ticket49287(topology_m2):
"""
test case for memberof and conflict entries

"""

# return
M1 = topology_m2.ms["master1"]
M2 = topology_m2.ms["master2"]

config_memberof(M1)
config_memberof(M2)

_enable_spec_logging(M1)
_enable_spec_logging(M2)

_disable_nunc_stans(M1)
_disable_nunc_stans(M2)

M1.restart(timeout=10)
M2.restart(timeout=10)

testbase = 'dc=test,dc=com'
bename = 'test'
> create_backend(M1, M2, testbase, bename)

/export/tests/tickets/ticket49287_test.py:282:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/export/tests/tickets/ticket49287_test.py:204: in create_backend
s1.mappingtree.create(beSuffix, beName)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.mappingTree.MappingTreeLegacy object at 0x7fd1668cf370>
suffix = 'dc=test,dc=com', bename = 'test', parent = None

def create(self, suffix=None, bename=None, parent=None):
'''
Create a mapping tree entry (under "cn=mapping tree,cn=config"),
for the 'suffix' and that is stored in 'bename' backend.
'bename' backend must exist before creating the mapping tree entry.

If a 'parent' is provided that means that we are creating a
sub-suffix mapping tree.

@param suffix - suffix mapped by this mapping tree entry. It will
be the common name ('cn') of the entry
@param benamebase - backend common name (e.g. 'userRoot')
@param parent - if provided is a parent suffix of 'suffix'

@return DN of the mapping tree entry

@raise ldap.NO_SUCH_OBJECT - if the backend entry or parent mapping
tree does not exist
ValueError - if missing a parameter,

'''
# Check suffix is provided
if not suffix:
raise ValueError("suffix is mandatory")
else:
nsuffix = normalizeDN(suffix)

# Check backend name is provided
if not bename:
raise ValueError("backend name is mandatory")

# Check that if the parent suffix is provided then
# it exists a mapping tree for it
if parent:
nparent = normalizeDN(parent)
filt = suffixfilt(parent)
try:
entry = self.conn.getEntry(DN_MAPPING_TREE, ldap.SCOPE_SUBTREE,
filt)
pass
except NoSuchEntryError:
raise ValueError("parent suffix has no mapping tree")
else:
nparent = ""

# Check if suffix exists, return
filt = suffixfilt(suffix)
try:
entry = self.conn.getEntry(DN_MAPPING_TREE, ldap.SCOPE_SUBTREE,
filt)
return entry
except ldap.NO_SUCH_OBJECT:
entry = None

#
# Now start the real work
#

# fix me when we can actually used escaped DNs
dn = ','.join(('cn="%s"' % nsuffix, DN_MAPPING_TREE))
entry = Entry(dn)
entry.update({
'objectclass': ['top', 'extensibleObject', MT_OBJECTCLASS_VALUE],
'nsslapd-state': 'backend',
# the value in the dn has to be DN escaped
# internal code will add the quoted value - unquoted value is
# useful for searching.
MT_PROPNAME_TO_ATTRNAME[MT_SUFFIX]: nsuffix,
MT_PROPNAME_TO_ATTRNAME[MT_BACKEND]: bename
})

# possibly add the parent
if parent:
entry.setValues(MT_PROPNAME_TO_ATTRNAME[MT_PARENT_SUFFIX], nparent)

try:
self.log.debug("Creating entry: %s", entry.dn)
self.log.info("Entry %r", entry)
self.conn.add_s(entry)
except ldap.LDAPError as e:
> raise ldap.LDAPError("Error adding suffix entry " + dn, e)
E ldap.LDAPError: ('Error adding suffix entry cn="dc=test,dc=com",cn=mapping tree,cn=config', UNWILLING_TO_PERFORM({'msgtype': 105, 'msgid': 4, 'result': 53, 'desc': 'Server is unwilling to perform', 'ctrls': []}))

/usr/local/lib/python3.8/site-packages/lib389/mappingTree.py:157: LDAPError
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 14c29b72-7508-4863-be5a-1140de187094 / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working 
(expect 900547c7-dc02-4e09-b1cb-1146ad1a8ca8 / got description=14c29b72-7508-4863-be5a-1140de187094) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket49287_test:ticket49287_test.py:77 update cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config to add nsDS5ReplicatedAttributeListTotal INFO  tests.tickets.ticket49287_test:ticket49287_test.py:77 update cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config to add nsDS5ReplicatedAttributeListTotal INFO  lib389:mappingTree.py:154 Entry dn: cn="dc=test,dc=com",cn=mapping tree,cn=config cn: dc=test,dc=com nsslapd-backend: test nsslapd-state: backend objectclass: top objectclass: extensibleObject objectclass: nsMappingTree
Failed tickets/ticket49303_test.py::test_ticket49303 15.31
topo = <lib389.topologies.TopologyMain object at 0x7fd16683cac0>

def test_ticket49303(topo):
"""
Test the nsTLSAllowClientRenegotiation setting.
"""
sslport = SECUREPORT_STANDALONE1

log.info("Ticket 49303 - Allow disabling of SSL renegotiation")

# No value set, defaults to reneg allowed
enable_ssl(topo.standalone, sslport)
> assert try_reneg(HOST_STANDALONE1, sslport) is True
E AssertionError: assert False is True
E + where False = try_reneg('LOCALHOST', 63601)

/export/tests/tickets/ticket49303_test.py:88: AssertionError
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket49303_test:ticket49303_test.py:84 Ticket 49303 - Allow disabling of SSL renegotiation
Failed tickets/ticket49412_test.py::test_ticket49412 0.00
topo = <lib389.topologies.TopologyMain object at 0x7fd166789c10>

def test_ticket49412(topo):
"""Specify a test case purpose or name here

:id: 4c7681ff-0511-4256-9589-bdcad84c13e6
:setup: Fill in set up configuration here
:steps:
1. Fill in test case steps here
2. And indent them like this (RST format requirement)
:expectedresults:
1. Fill in the result that is expected
2. For each test step
"""

M1 = topo.ms["master1"]

# wrong call with invalid value (should be str(60)
# that create replace with NULL value
# it should fail with UNWILLING_TO_PERFORM
try:
> M1.modify_s(CHANGELOG, [(ldap.MOD_REPLACE, MAXAGE_ATTR, 60),
(ldap.MOD_REPLACE, TRIMINTERVAL, 10)])

/export/tests/tickets/ticket49412_test.py:44:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:640: in modify_s
return self.modify_ext_s(dn,modlist,None,None)
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:613: in modify_ext_s
resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout)
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:764: in result3
resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4(
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:774: in result4
ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop)
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:340: in _ldap_call
reraise(exc_type, exc_value, exc_traceback)
/usr/local/lib64/python3.8/site-packages/ldap/compat.py:46: in reraise
raise exc_value
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd166c88670>
func = <built-in method result4 of LDAP object at 0x7fd166c80720>
args = (39, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None
exc_type = None, exc_value = None, exc_traceback = None

def _ldap_call(self,func,*args,**kwargs):
"""
Wrapper method mainly for serializing calls into OpenLDAP libs
and trace logs
"""
self._ldap_object_lock.acquire()
if __debug__:
if self._trace_level>=1:
self._trace_file.write('*** %s %s - %s\n%s\n' % (
repr(self),
self._uri,
'.'.join((self.__class__.__name__,func.__name__)),
pprint.pformat((args,kwargs))
))
if self._trace_level>=9:
traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file)
diagnostic_message_success = None
try:
try:
> result = func(*args,**kwargs)
E ldap.NO_SUCH_OBJECT: {'msgtype': 103, 'msgid': 39, 'result': 32, 'desc': 'No such object', 'ctrls': []}

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:324: NO_SUCH_OBJECT
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for consumer1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39201, 'ldap-secureport': 63901, 'server-id': 'consumer1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:169 Joining consumer consumer1 from master1 ... INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 10382b5b-884f-4715-b98c-fd9ae8d44545 / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is working INFO  lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 INFO  lib389.topologies:topologies.py:174 Ensuring consumer consumer1 from master1 ... 
INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 already exists
Failed tickets/ticket49463_test.py::test_ticket_49463 204.05
topo = <lib389.topologies.TopologyMain object at 0x7fd1669bffd0>

def test_ticket_49463(topo):
"""Specify a test case purpose or name here

:id: 2a68e8be-387d-4ac7-9452-1439e8483c13
:setup: Fill in set up configuration here
:steps:
1. Enable fractional replication
2. Enable replication logging
3. Check that replication is working fine
4. Generate skipped updates to create keep alive entries
5. Remove M3 from the topology
6. issue cleanAllRuv FORCE that will run on M1 then propagated M2 and M4
7. Check that Number DEL keep alive '3' is <= 1
8. Check M1 is the originator of cleanAllRuv and M2/M4 the propagated ones
9. Check replication M1,M2 and M4 can recover
10. Remove M4 from the topology
11. Issue cleanAllRuv not force while M2 is stopped (that hangs the cleanAllRuv)
12. Check that nsds5ReplicaCleanRUV is correctly encoded on M1 (last value: 1)
13. Check that nsds5ReplicaCleanRUV encoding survives M1 restart
14. Check that nsds5ReplicaCleanRUV encoding is valid on M2 (last value: 0)
15. Check that (for M4 cleanAllRUV) M1 is Originator and M2 propagation
:expectedresults:
1. No report of failure when the RUV is updated
"""

# Step 1 - Configure fractional (skip telephonenumber) replication
M1 = topo.ms["master1"]
M2 = topo.ms["master2"]
M3 = topo.ms["master3"]
M4 = topo.ms["master4"]
repl = ReplicationManager(DEFAULT_SUFFIX)
fractional_server_to_replica(M1, M2)
fractional_server_to_replica(M1, M3)
fractional_server_to_replica(M1, M4)

fractional_server_to_replica(M2, M1)
fractional_server_to_replica(M2, M3)
fractional_server_to_replica(M2, M4)

fractional_server_to_replica(M3, M1)
fractional_server_to_replica(M3, M2)
fractional_server_to_replica(M3, M4)

fractional_server_to_replica(M4, M1)
fractional_server_to_replica(M4, M2)
fractional_server_to_replica(M4, M3)

# Step 2 - enable internal op logging and replication debug
for i in (M1, M2, M3, M4):
i.config.loglevel(vals=[256 + 4], service='access')
i.config.loglevel(vals=[LOG_REPLICA, LOG_DEFAULT], service='error')

# Step 3 - Check that replication is working fine
add_user(M1, 11, desc="add to M1")
add_user(M2, 21, desc="add to M2")
add_user(M3, 31, desc="add to M3")
add_user(M4, 41, desc="add to M4")

for i in (M1, M2, M3, M4):
for j in (M1, M2, M3, M4):
if i == j:
continue
repl.wait_for_replication(i, j)

# Step 4 - Generate skipped updates to create keep alive entries
for i in (M1, M2, M3, M4):
cn = '%s_%d' % (USER_CN, 11)
dn = 'uid=%s,ou=People,%s' % (cn, SUFFIX)
users = UserAccount(i, dn)
for j in range(110):
users.set('telephoneNumber', str(j))

# Step 5 - Remove M3 from the topology
M3.stop()
M1.agreement.delete(suffix=SUFFIX, consumer_host=M3.host, consumer_port=M3.port)
M2.agreement.delete(suffix=SUFFIX, consumer_host=M3.host, consumer_port=M3.port)
M4.agreement.delete(suffix=SUFFIX, consumer_host=M3.host, consumer_port=M3.port)

# Step 6 - Then issue cleanAllRuv FORCE that will run on M1, M2 and M4
M1.tasks.cleanAllRUV(suffix=SUFFIX, replicaid='3',
force=True, args={TASK_WAIT: True})

# Step 7 - Count the number of received DEL of the keep alive 3
for i in (M1, M2, M4):
i.restart()
regex = re.compile(".*DEL dn=.cn=repl keep alive 3.*")
for i in (M1, M2, M4):
count = count_pattern_accesslog(M1, regex)
log.debug("count on %s = %d" % (i, count))

# check that DEL is replicated once (If DEL is kept in the fix)
# check that DEL is is not replicated (If DEL is finally no long done in the fix)
assert ((count == 1) or (count == 0))

# Step 8 - Check that M1 is Originator of cleanAllRuv and M2, M4 propagation
regex = re.compile(".*Original task deletes Keep alive entry .3.*")
assert pattern_errorlog(M1, regex)

regex = re.compile(".*Propagated task does not delete Keep alive entry .3.*")
assert pattern_errorlog(M2, regex)
assert pattern_errorlog(M4, regex)

# Step 9 - Check replication M1,M2 and M4 can recover
add_user(M1, 12, desc="add to M1")
add_user(M2, 22, desc="add to M2")
for i in (M1, M2, M4):
for j in (M1, M2, M4):
if i == j:
continue
repl.wait_for_replication(i, j)

# Step 10 - Remove M4 from the topology
M4.stop()
M1.agreement.delete(suffix=SUFFIX, consumer_host=M4.host, consumer_port=M4.port)
M2.agreement.delete(suffix=SUFFIX, consumer_host=M4.host, consumer_port=M4.port)

# Step 11 - Issue cleanAllRuv not force while M2 is stopped (that hangs the cleanAllRuv)
M2.stop()
M1.tasks.cleanAllRUV(suffix=SUFFIX, replicaid='4',
force=False, args={TASK_WAIT: False})

# Step 12
# CleanAllRuv is hanging waiting for M2 to restart
# Check that nsds5ReplicaCleanRUV is correctly encoded on M1
replicas = Replicas(M1)
replica = replicas.list()[0]
time.sleep(0.5)
replica.present('nsds5ReplicaCleanRUV')
log.info("M1: nsds5ReplicaCleanRUV=%s" % replica.get_attr_val_utf8('nsds5replicacleanruv'))
regex = re.compile("^4:.*:no:1$")
> assert regex.match(replica.get_attr_val_utf8('nsds5replicacleanruv'))
E AssertionError: assert None
E + where None = <built-in method match of re.Pattern object at 0x7fd1672ed110>('4:no:1:dc=example,dc=com')
E + where <built-in method match of re.Pattern object at 0x7fd1672ed110> = re.compile('^4:.*:no:1$').match
E + and '4:no:1:dc=example,dc=com' = <bound method DSLdapObject.get_attr_val_utf8 of <lib389.replica.Replica object at 0x7fd16681aa60>>('nsds5replicacleanruv')
E + where <bound method DSLdapObject.get_attr_val_utf8 of <lib389.replica.Replica object at 0x7fd16681aa60>> = <lib389.replica.Replica object at 0x7fd16681aa60>.get_attr_val_utf8

/export/tests/tickets/ticket49463_test.py:188: AssertionError
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master3 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39003, 'ldap-secureport': 63703, 'server-id': 'master3', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master4 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39004, 'ldap-secureport': 63704, 'server-id': 'master4', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... 
INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 7d267a0c-5fa6-4a64-b1cb-69312fe71f95 / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 63c93940-4969-42c7-84c9-11d46a951b44 / got description=7d267a0c-5fa6-4a64-b1cb-69312fe71f95) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:156 Joining master master3 to master1 ... 
INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 6e150e30-e9f6-42d3-b85e-73b6f997e8d8 / got description=63c93940-4969-42c7-84c9-11d46a951b44) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect ae589e9d-7b41-4204-b5ab-5f11fb1f679b / got description=6e150e30-e9f6-42d3-b85e-73b6f997e8d8) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 INFO  lib389.topologies:topologies.py:156 Joining master master4 to master1 ... 
INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect cd219720-4940-4c0d-ab69-60479bfa66a7 / got description=ae589e9d-7b41-4204-b5ab-5f11fb1f679b) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect eebe68be-6587-4db5-8bb3-d5e20a7a9528 / got description=cd219720-4940-4c0d-ab69-60479bfa66a7) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... 
INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master3 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master4 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master3 ... INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is was created INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master4 ... INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is was created INFO  lib389.topologies:topologies.py:164 Ensuring master master3 to master1 ... 
INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master3 to master2 ... INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.topologies:topologies.py:164 Ensuring master master3 to master4 ... INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is was created INFO  lib389.topologies:topologies.py:164 Ensuring master master4 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master4 to master2 ... INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.topologies:topologies.py:164 Ensuring master master4 to master3 ... INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is was created
-------------------------------Captured log call--------------------------------
INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 already exists INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 already exists INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 already exists INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 already exists INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 already exists INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to 
ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 already exists INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 4788b594-27e3-4ff6-bd0e-4b7f74e2d5f5 / got description=eebe68be-6587-4db5-8bb3-d5e20a7a9528) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect daaad5e1-374d-432a-b7dd-b7c30575621d / got description=4788b594-27e3-4ff6-bd0e-4b7f74e2d5f5) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 95cf8330-bab3-4eb9-b04b-634d02ea4059 / got description=daaad5e1-374d-432a-b7dd-b7c30575621d) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to 
ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 3c5f00b9-7111-412e-a99b-a9ef27ace241 / got description=95cf8330-bab3-4eb9-b04b-634d02ea4059) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 3c5f00b9-7111-412e-a99b-a9ef27ace241 / got description=95cf8330-bab3-4eb9-b04b-634d02ea4059) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 3c5f00b9-7111-412e-a99b-a9ef27ace241 / got description=95cf8330-bab3-4eb9-b04b-634d02ea4059) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect d11c9cec-f4e8-4fd6-b6ca-2579d220a300 / got description=3c5f00b9-7111-412e-a99b-a9ef27ace241) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 49a90741-1be1-4565-8d47-c1ce7fe46738 / got 
description=d11c9cec-f4e8-4fd6-b6ca-2579d220a300) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect f420435e-9ea8-4101-871b-433679b9cf71 / got description=49a90741-1be1-4565-8d47-c1ce7fe46738) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 17c16a8d-1484-4264-a341-3559e14dc881 / got description=f420435e-9ea8-4101-871b-433679b9cf71) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 2143e661-5928-4ed3-b79f-75e6546f6a14 / got description=17c16a8d-1484-4264-a341-3559e14dc881) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 
68f26556-b83c-46f2-b5e3-b88ec8e16f5d / got description=17c16a8d-1484-4264-a341-3559e14dc881) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 68f26556-b83c-46f2-b5e3-b88ec8e16f5d / got description=2143e661-5928-4ed3-b79f-75e6546f6a14) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect e86781d0-4e2c-4c39-8999-925c8de44fef / got description=68f26556-b83c-46f2-b5e3-b88ec8e16f5d) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect c071dd5d-ee89-4b92-87d3-52a69993523f / got description=e86781d0-4e2c-4c39-8999-925c8de44fef) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect c071dd5d-ee89-4b92-87d3-52a69993523f / got description=e86781d0-4e2c-4c39-8999-925c8de44fef) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect c071dd5d-ee89-4b92-87d3-52a69993523f / got 
description=e86781d0-4e2c-4c39-8999-925c8de44fef) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect c071dd5d-ee89-4b92-87d3-52a69993523f / got description=e86781d0-4e2c-4c39-8999-925c8de44fef) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is working INFO  lib389:agreement.py:1095 Agreement (cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config) was successfully removed INFO  lib389:agreement.py:1095 Agreement (cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config) was successfully removed INFO  lib389:agreement.py:1095 Agreement (cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config) was successfully removed INFO  lib389:tasks.py:1400 cleanAllRUV task (task-10292020_003558) completed successfully INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 04580b60-0c41-4637-ac7e-5ca6f6c08018 / got description=c071dd5d-ee89-4b92-87d3-52a69993523f) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 04580b60-0c41-4637-ac7e-5ca6f6c08018 / got description=c071dd5d-ee89-4b92-87d3-52a69993523f) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from 
ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 93d4230f-65c9-4c64-b248-1545649f5032 / got description=04580b60-0c41-4637-ac7e-5ca6f6c08018) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 548bec87-e964-489d-9a35-f4358e01d023 / got description=93d4230f-65c9-4c64-b248-1545649f5032) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 60f4d6df-a27d-4bea-9742-c2c74fed46fc / got description=548bec87-e964-489d-9a35-f4358e01d023) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 80eb49b1-2d58-4bd5-86d4-5efc650aa9ed / got description=60f4d6df-a27d-4bea-9742-c2c74fed46fc) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 
80eb49b1-2d58-4bd5-86d4-5efc650aa9ed / got description=60f4d6df-a27d-4bea-9742-c2c74fed46fc) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 80eb49b1-2d58-4bd5-86d4-5efc650aa9ed / got description=60f4d6df-a27d-4bea-9742-c2c74fed46fc) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect af3fae4c-78d1-4217-9137-442ff77c635c / got description=80eb49b1-2d58-4bd5-86d4-5efc650aa9ed) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389:agreement.py:1095 Agreement (cn=004,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config) was successfully removed INFO  lib389:agreement.py:1095 Agreement (cn=004,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config) was successfully removed INFO  lib389:tasks.py:1400 cleanAllRUV task (task-10292020_003648) completed successfully INFO  lib389.utils:ticket49463_test.py:186 M1: nsds5ReplicaCleanRUV=4:no:1:dc=example,dc=com
Failed tickets/ticket50232_test.py::test_ticket50232_normal 0.63
topology_st = <lib389.topologies.TopologyMain object at 0x7fd16680f070>

def test_ticket50232_normal(topology_st):
"""
The fix for ticket 50232


The test sequence is:
- create suffix
- add suffix entry and some child entries
- "normally" done after populating suffix: enable replication
- get RUV and database generation
- export -r
- import
- get RUV and database generation
- assert database generation has not changed
"""

log.info('Testing Ticket 50232 - export creates not imprtable ldif file, normal creation order')

topology_st.standalone.backend.create(NORMAL_SUFFIX, {BACKEND_NAME: NORMAL_BACKEND_NAME})
topology_st.standalone.mappingtree.create(NORMAL_SUFFIX, bename=NORMAL_BACKEND_NAME, parent=None)

_populate_suffix(topology_st.standalone, NORMAL_BACKEND_NAME)

repl = ReplicationManager(DEFAULT_SUFFIX)
> repl._ensure_changelog(topology_st.standalone)

/export/tests/tickets/ticket50232_test.py:113:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/usr/local/lib/python3.8/site-packages/lib389/replica.py:1928: in _ensure_changelog
cl.create(properties={
/usr/local/lib/python3.8/site-packages/lib389/_mapped_object.py:971: in create
return self._create(rdn, properties, basedn, ensure=False)
/usr/local/lib/python3.8/site-packages/lib389/_mapped_object.py:946: in _create
self._instance.add_ext_s(e, serverctrls=self._server_controls, clientctrls=self._client_controls, escapehatch='i am sure')
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:176: in inner
return f(ent.dn, ent.toTupleList(), *args[2:])
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:425: in add_ext_s
resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout)
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:764: in result3
resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4(
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:774: in result4
ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop)
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:340: in _ldap_call
reraise(exc_type, exc_value, exc_traceback)
/usr/local/lib64/python3.8/site-packages/ldap/compat.py:46: in reraise
raise exc_value
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd16680f0d0>
func = <built-in method result4 of LDAP object at 0x7fd16666d7b0>
args = (13, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None
exc_type = None, exc_value = None, exc_traceback = None

def _ldap_call(self,func,*args,**kwargs):
"""
Wrapper method mainly for serializing calls into OpenLDAP libs
and trace logs
"""
self._ldap_object_lock.acquire()
if __debug__:
if self._trace_level>=1:
self._trace_file.write('*** %s %s - %s\n%s\n' % (
repr(self),
self._uri,
'.'.join((self.__class__.__name__,func.__name__)),
pprint.pformat((args,kwargs))
))
if self._trace_level>=9:
traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file)
diagnostic_message_success = None
try:
try:
> result = func(*args,**kwargs)
E ldap.UNWILLING_TO_PERFORM: {'msgtype': 105, 'msgid': 13, 'result': 53, 'desc': 'Server is unwilling to perform', 'ctrls': [], 'info': 'Changelog configuration is part of the backend configuration'}

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:324: UNWILLING_TO_PERFORM
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  lib389:backend.py:80 List backend with suffix=o=normal INFO  lib389:backend.py:290 Creating a local backend INFO  lib389:backend.py:76 List backend cn=normal,cn=ldbm database,cn=plugins,cn=config INFO  lib389:__init__.py:1713 Found entry dn: cn=normal,cn=ldbm database,cn=plugins,cn=config cn: normal nsslapd-cachememsize: 512000 nsslapd-cachesize: -1 nsslapd-directory: /var/lib/dirsrv/slapd-standalone1/db/normal nsslapd-dncachememsize: 16777216 nsslapd-readonly: off nsslapd-require-index: off nsslapd-require-internalop-index: off nsslapd-suffix: o=normal objectClass: top objectClass: extensibleObject objectClass: nsBackendInstance INFO  lib389:mappingTree.py:154 Entry dn: cn="o=normal",cn=mapping tree,cn=config cn: o=normal nsslapd-backend: normal nsslapd-state: backend objectclass: top objectclass: extensibleObject objectclass: nsMappingTree INFO  lib389:__init__.py:1713 Found entry dn: cn=o\3Dnormal,cn=mapping tree,cn=config cn: o=normal nsslapd-backend: normal nsslapd-state: backend objectClass: top objectClass: extensibleObject objectClass: nsMappingTree
Failed tickets/ticket50232_test.py::test_ticket50232_reverse 0.13
topology_st = <lib389.topologies.TopologyMain object at 0x7fd16680f070>

def test_ticket50232_reverse(topology_st):
"""
The fix for ticket 50232


The test sequence is:
- create suffix
- enable replication before suffix enztry is added
- add suffix entry and some child entries
- get RUV and database generation
- export -r
- import
- get RUV and database generation
- assert database generation has not changed
"""

log.info('Testing Ticket 50232 - export creates not imprtable ldif file, normal creation order')

#
# Setup Replication
#
log.info('Setting up replication...')
repl = ReplicationManager(DEFAULT_SUFFIX)
# repl.create_first_master(topology_st.standalone)
#
# enable dynamic plugins, memberof and retro cl plugin
#
topology_st.standalone.backend.create(REVERSE_SUFFIX, {BACKEND_NAME: REVERSE_BACKEND_NAME})
topology_st.standalone.mappingtree.create(REVERSE_SUFFIX, bename=REVERSE_BACKEND_NAME, parent=None)

> _enable_replica(topology_st.standalone, REVERSE_SUFFIX)

/export/tests/tickets/ticket50232_test.py:155:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/export/tests/tickets/ticket50232_test.py:35: in _enable_replica
repl._ensure_changelog(instance)
/usr/local/lib/python3.8/site-packages/lib389/replica.py:1928: in _ensure_changelog
cl.create(properties={
/usr/local/lib/python3.8/site-packages/lib389/_mapped_object.py:971: in create
return self._create(rdn, properties, basedn, ensure=False)
/usr/local/lib/python3.8/site-packages/lib389/_mapped_object.py:946: in _create
self._instance.add_ext_s(e, serverctrls=self._server_controls, clientctrls=self._client_controls, escapehatch='i am sure')
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:176: in inner
return f(ent.dn, ent.toTupleList(), *args[2:])
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:425: in add_ext_s
resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout)
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:764: in result3
resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4(
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:774: in result4
ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop)
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:340: in _ldap_call
reraise(exc_type, exc_value, exc_traceback)
/usr/local/lib64/python3.8/site-packages/ldap/compat.py:46: in reraise
raise exc_value
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd16680f0d0>
func = <built-in method result4 of LDAP object at 0x7fd16666d7b0>
args = (22, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None
exc_type = None, exc_value = None, exc_traceback = None

def _ldap_call(self,func,*args,**kwargs):
"""
Wrapper method mainly for serializing calls into OpenLDAP libs
and trace logs
"""
self._ldap_object_lock.acquire()
if __debug__:
if self._trace_level>=1:
self._trace_file.write('*** %s %s - %s\n%s\n' % (
repr(self),
self._uri,
'.'.join((self.__class__.__name__,func.__name__)),
pprint.pformat((args,kwargs))
))
if self._trace_level>=9:
traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file)
diagnostic_message_success = None
try:
try:
> result = func(*args,**kwargs)
E ldap.UNWILLING_TO_PERFORM: {'msgtype': 105, 'msgid': 22, 'result': 53, 'desc': 'Server is unwilling to perform', 'ctrls': [], 'info': 'Changelog configuration is part of the backend configuration'}

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:324: UNWILLING_TO_PERFORM
-------------------------------Captured log call--------------------------------
INFO  lib389:backend.py:80 List backend with suffix=o=reverse INFO  lib389:backend.py:290 Creating a local backend INFO  lib389:backend.py:76 List backend cn=reverse,cn=ldbm database,cn=plugins,cn=config INFO  lib389:__init__.py:1713 Found entry dn: cn=reverse,cn=ldbm database,cn=plugins,cn=config cn: reverse nsslapd-cachememsize: 512000 nsslapd-cachesize: -1 nsslapd-directory: /var/lib/dirsrv/slapd-standalone1/db/reverse nsslapd-dncachememsize: 16777216 nsslapd-readonly: off nsslapd-require-index: off nsslapd-require-internalop-index: off nsslapd-suffix: o=reverse objectClass: top objectClass: extensibleObject objectClass: nsBackendInstance INFO  lib389:mappingTree.py:154 Entry dn: cn="o=reverse",cn=mapping tree,cn=config cn: o=reverse nsslapd-backend: reverse nsslapd-state: backend objectclass: top objectclass: extensibleObject objectclass: nsMappingTree INFO  lib389:__init__.py:1713 Found entry dn: cn=o\3Dreverse,cn=mapping tree,cn=config cn: o=reverse nsslapd-backend: reverse nsslapd-state: backend objectClass: top objectClass: extensibleObject objectClass: nsMappingTree
Failed tickets/ticket548_test.py::test_ticket548_test_with_no_policy 0.08
topology_st = <lib389.topologies.TopologyMain object at 0x7fd165daed60>

def test_ticket548_test_with_no_policy(topology_st):
"""
Check shadowAccount under no password policy
"""
log.info("Case 1. No password policy")

log.info("Bind as %s" % DN_DM)
topology_st.standalone.simple_bind_s(DN_DM, PASSWORD)

log.info('Add an entry' + USER1_DN)
try:
topology_st.standalone.add_s(
Entry((USER1_DN, {'objectclass': "top person organizationalPerson inetOrgPerson shadowAccount".split(),
'sn': '1',
'cn': 'user 1',
'uid': 'user1',
'givenname': 'user',
'mail': 'user1@' + DEFAULT_SUFFIX,
'userpassword': USER_PW})))
except ldap.LDAPError as e:
log.fatal('test_ticket548: Failed to add user' + USER1_DN + ': error ' + e.message['desc'])
assert False

edate = int(time.time() / (60 * 60 * 24))
log.info('Search entry %s' % USER1_DN)

log.info("Bind as %s" % USER1_DN)
topology_st.standalone.simple_bind_s(USER1_DN, USER_PW)
> entry = topology_st.standalone.getEntry(USER1_DN, ldap.SCOPE_BASE, "(objectclass=*)", ['shadowLastChange'])

/export/tests/tickets/ticket548_test.py:211:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd165dae880>
args = ('uid=user1,dc=example,dc=com', 0, '(objectclass=*)', ['shadowLastChange'])
kwargs = {}, res = 6, restype = 101, obj = []

def getEntry(self, *args, **kwargs):
"""Wrapper around SimpleLDAPObject.search. It is common to just get
one entry.
@param - entry dn
@param - search scope, in ldap.SCOPE_BASE (default),
ldap.SCOPE_SUB, ldap.SCOPE_ONE
@param filterstr - filterstr, default '(objectClass=*)' from
SimpleLDAPObject
@param attrlist - list of attributes to retrieve. eg ['cn', 'uid']
@oaram attrsonly - default None from SimpleLDAPObject
eg. getEntry(dn, scope, filter, attributes)

XXX This cannot return None
"""
self.log.debug("Retrieving entry with %r", [args])
if len(args) == 1 and 'scope' not in kwargs:
args += (ldap.SCOPE_BASE, )

res = self.search(*args, **kwargs)
restype, obj = self.result(res)
# TODO: why not test restype?
if not obj:
> raise NoSuchEntryError("no such entry for %r", [args])
E lib389.exceptions.NoSuchEntryError: ('no such entry for %r', [('uid=user1,dc=example,dc=com', 0, '(objectclass=*)', ['shadowLastChange'])])

/usr/local/lib/python3.8/site-packages/lib389/__init__.py:1700: NoSuchEntryError
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Failed tickets/ticket548_test.py::test_ticket548_test_global_policy 0.37
topology_st = <lib389.topologies.TopologyMain object at 0x7fd165daed60>

def test_ticket548_test_global_policy(topology_st):
"""
Check shadowAccount with global password policy
"""

log.info("Case 2. Check shadowAccount with global password policy")

log.info("Bind as %s" % DN_DM)
topology_st.standalone.simple_bind_s(DN_DM, PASSWORD)

set_global_pwpolicy(topology_st)

log.info('Add an entry' + USER2_DN)
try:
topology_st.standalone.add_s(
Entry((USER2_DN, {'objectclass': "top person organizationalPerson inetOrgPerson shadowAccount".split(),
'sn': '2',
'cn': 'user 2',
'uid': 'user2',
'givenname': 'user',
'mail': 'user2@' + DEFAULT_SUFFIX,
'userpassword': USER_PW})))
except ldap.LDAPError as e:
log.fatal('test_ticket548: Failed to add user' + USER2_DN + ': error ' + e.message['desc'])
assert False

edate = int(time.time() / (60 * 60 * 24))

log.info("Bind as %s" % USER1_DN)
topology_st.standalone.simple_bind_s(USER1_DN, USER_PW)

log.info('Search entry %s' % USER1_DN)
> entry = topology_st.standalone.getEntry(USER1_DN, ldap.SCOPE_BASE, "(objectclass=*)")

/export/tests/tickets/ticket548_test.py:249:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd165dae880>
args = ('uid=user1,dc=example,dc=com', 0, '(objectclass=*)'), kwargs = {}
res = 15, restype = 101, obj = []

def getEntry(self, *args, **kwargs):
"""Wrapper around SimpleLDAPObject.search. It is common to just get
one entry.
@param - entry dn
@param - search scope, in ldap.SCOPE_BASE (default),
ldap.SCOPE_SUB, ldap.SCOPE_ONE
@param filterstr - filterstr, default '(objectClass=*)' from
SimpleLDAPObject
@param attrlist - list of attributes to retrieve. eg ['cn', 'uid']
@oaram attrsonly - default None from SimpleLDAPObject
eg. getEntry(dn, scope, filter, attributes)

XXX This cannot return None
"""
self.log.debug("Retrieving entry with %r", [args])
if len(args) == 1 and 'scope' not in kwargs:
args += (ldap.SCOPE_BASE, )

res = self.search(*args, **kwargs)
restype, obj = self.result(res)
# TODO: why not test restype?
if not obj:
> raise NoSuchEntryError("no such entry for %r", [args])
E lib389.exceptions.NoSuchEntryError: ('no such entry for %r', [('uid=user1,dc=example,dc=com', 0, '(objectclass=*)')])

/usr/local/lib/python3.8/site-packages/lib389/__init__.py:1700: NoSuchEntryError
Failed tickets/ticket548_test.py::test_ticket548_test_subtree_policy 2.20
topology_st = <lib389.topologies.TopologyMain object at 0x7fd165daed60>
user = 'uid=user3,dc=example,dc=com', passwd = 'password'
newpasswd = 'password0'

def update_passwd(topology_st, user, passwd, newpasswd):
log.info(" Bind as {%s,%s}" % (user, passwd))
topology_st.standalone.simple_bind_s(user, passwd)
try:
> topology_st.standalone.modify_s(user, [(ldap.MOD_REPLACE, 'userpassword', newpasswd.encode())])

/export/tests/tickets/ticket548_test.py:160:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

args = ('uid=user3,dc=example,dc=com', [(2, 'userpassword', b'password0')])
kwargs = {}
c_stack = [FrameInfo(frame=<frame at 0x7fd1676f4a40, file '/usr/local/lib/python3.8/site-packages/lib389/__init__.py', line 180,...mbda>', code_context=[' self._inner_hookexec = lambda hook, methods, kwargs: hook.multicall(\n'], index=0), ...]
frame = FrameInfo(frame=<frame at 0x559a4eedb710, file '/export/tests/tickets/ticket548_test.py', line 164, code update_passwd...[" topology_st.standalone.modify_s(user, [(ldap.MOD_REPLACE, 'userpassword', newpasswd.encode())])\n"], index=0)

def inner(*args, **kwargs):
if name in [
'add_s',
'bind_s',
'delete_s',
'modify_s',
'modrdn_s',
'rename_s',
'sasl_interactive_bind_s',
'search_s',
'search_ext_s',
'simple_bind_s',
'unbind_s',
'getEntry',
] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'):
c_stack = inspect.stack()
frame = c_stack[1]

warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. "
"Found in: %s:%s" % (name, frame.filename, frame.lineno)))
# Later, we will add a sleep here to make it even more painful.
# Finally, it will raise an exception.
elif 'escapehatch' in kwargs:
kwargs.pop('escapehatch')

if name == 'result':
objtype, data = f(*args, **kwargs)
# data is either a 2-tuple or a list of 2-tuples
# print data
if data:
if isinstance(data, tuple):
return objtype, Entry(data)
elif isinstance(data, list):
# AD sends back these search references
# if objtype == ldap.RES_SEARCH_RESULT and \
# isinstance(data[-1],tuple) and \
# not data[-1][0]:
# print "Received search reference: "
# pprint.pprint(data[-1][1])
# data.pop() # remove the last non-entry element

return objtype, [Entry(x) for x in data]
else:
raise TypeError("unknown data type %s returned by result" %
type(data))
else:
return objtype, data
elif name.startswith('add'):
# the first arg is self
# the second and third arg are the dn and the data to send
# We need to convert the Entry into the format used by
# python-ldap
ent = args[0]
if isinstance(ent, Entry):
return f(ent.dn, ent.toTupleList(), *args[2:])
else:
return f(*args, **kwargs)
else:
> return f(*args, **kwargs)

/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd165dae880>
dn = 'uid=user3,dc=example,dc=com'
modlist = [(2, 'userpassword', b'password0')]

def modify_s(self,dn,modlist):
> return self.modify_ext_s(dn,modlist,None,None)

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:640:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

args = ('uid=user3,dc=example,dc=com', [(2, 'userpassword', b'password0')], None, None)
kwargs = {}

def inner(*args, **kwargs):
if name in [
'add_s',
'bind_s',
'delete_s',
'modify_s',
'modrdn_s',
'rename_s',
'sasl_interactive_bind_s',
'search_s',
'search_ext_s',
'simple_bind_s',
'unbind_s',
'getEntry',
] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'):
c_stack = inspect.stack()
frame = c_stack[1]

warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. "
"Found in: %s:%s" % (name, frame.filename, frame.lineno)))
# Later, we will add a sleep here to make it even more painful.
# Finally, it will raise an exception.
elif 'escapehatch' in kwargs:
kwargs.pop('escapehatch')

if name == 'result':
objtype, data = f(*args, **kwargs)
# data is either a 2-tuple or a list of 2-tuples
# print data
if data:
if isinstance(data, tuple):
return objtype, Entry(data)
elif isinstance(data, list):
# AD sends back these search references
# if objtype == ldap.RES_SEARCH_RESULT and \
# isinstance(data[-1],tuple) and \
# not data[-1][0]:
# print "Received search reference: "
# pprint.pprint(data[-1][1])
# data.pop() # remove the last non-entry element

return objtype, [Entry(x) for x in data]
else:
raise TypeError("unknown data type %s returned by result" %
type(data))
else:
return objtype, data
elif name.startswith('add'):
# the first arg is self
# the second and third arg are the dn and the data to send
# We need to convert the Entry into the format used by
# python-ldap
ent = args[0]
if isinstance(ent, Entry):
return f(ent.dn, ent.toTupleList(), *args[2:])
else:
return f(*args, **kwargs)
else:
> return f(*args, **kwargs)

/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd165dae880>
dn = 'uid=user3,dc=example,dc=com'
modlist = [(2, 'userpassword', b'password0')], serverctrls = None
clientctrls = None

def modify_ext_s(self,dn,modlist,serverctrls=None,clientctrls=None):
msgid = self.modify_ext(dn,modlist,serverctrls,clientctrls)
> resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout)

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:613:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

args = (34,), kwargs = {'all': 1, 'timeout': -1}

def inner(*args, **kwargs):
if name in [
'add_s',
'bind_s',
'delete_s',
'modify_s',
'modrdn_s',
'rename_s',
'sasl_interactive_bind_s',
'search_s',
'search_ext_s',
'simple_bind_s',
'unbind_s',
'getEntry',
] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'):
c_stack = inspect.stack()
frame = c_stack[1]

warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. "
"Found in: %s:%s" % (name, frame.filename, frame.lineno)))
# Later, we will add a sleep here to make it even more painful.
# Finally, it will raise an exception.
elif 'escapehatch' in kwargs:
kwargs.pop('escapehatch')

if name == 'result':
objtype, data = f(*args, **kwargs)
# data is either a 2-tuple or a list of 2-tuples
# print data
if data:
if isinstance(data, tuple):
return objtype, Entry(data)
elif isinstance(data, list):
# AD sends back these search references
# if objtype == ldap.RES_SEARCH_RESULT and \
# isinstance(data[-1],tuple) and \
# not data[-1][0]:
# print "Received search reference: "
# pprint.pprint(data[-1][1])
# data.pop() # remove the last non-entry element

return objtype, [Entry(x) for x in data]
else:
raise TypeError("unknown data type %s returned by result" %
type(data))
else:
return objtype, data
elif name.startswith('add'):
# the first arg is self
# the second and third arg are the dn and the data to send
# We need to convert the Entry into the format used by
# python-ldap
ent = args[0]
if isinstance(ent, Entry):
return f(ent.dn, ent.toTupleList(), *args[2:])
else:
return f(*args, **kwargs)
else:
> return f(*args, **kwargs)

/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd165dae880>, msgid = 34, all = 1
timeout = -1, resp_ctrl_classes = None

def result3(self,msgid=ldap.RES_ANY,all=1,timeout=None,resp_ctrl_classes=None):
> resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4(
msgid,all,timeout,
add_ctrls=0,add_intermediates=0,add_extop=0,
resp_ctrl_classes=resp_ctrl_classes
)

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:764:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

args = (34, 1, -1)
kwargs = {'add_ctrls': 0, 'add_extop': 0, 'add_intermediates': 0, 'resp_ctrl_classes': None}

def inner(*args, **kwargs):
if name in [
'add_s',
'bind_s',
'delete_s',
'modify_s',
'modrdn_s',
'rename_s',
'sasl_interactive_bind_s',
'search_s',
'search_ext_s',
'simple_bind_s',
'unbind_s',
'getEntry',
] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'):
c_stack = inspect.stack()
frame = c_stack[1]

warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. "
"Found in: %s:%s" % (name, frame.filename, frame.lineno)))
# Later, we will add a sleep here to make it even more painful.
# Finally, it will raise an exception.
elif 'escapehatch' in kwargs:
kwargs.pop('escapehatch')

if name == 'result':
objtype, data = f(*args, **kwargs)
# data is either a 2-tuple or a list of 2-tuples
# print data
if data:
if isinstance(data, tuple):
return objtype, Entry(data)
elif isinstance(data, list):
# AD sends back these search references
# if objtype == ldap.RES_SEARCH_RESULT and \
# isinstance(data[-1],tuple) and \
# not data[-1][0]:
# print "Received search reference: "
# pprint.pprint(data[-1][1])
# data.pop() # remove the last non-entry element

return objtype, [Entry(x) for x in data]
else:
raise TypeError("unknown data type %s returned by result" %
type(data))
else:
return objtype, data
elif name.startswith('add'):
# the first arg is self
# the second and third arg are the dn and the data to send
# We need to convert the Entry into the format used by
# python-ldap
ent = args[0]
if isinstance(ent, Entry):
return f(ent.dn, ent.toTupleList(), *args[2:])
else:
return f(*args, **kwargs)
else:
> return f(*args, **kwargs)

/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd165dae880>, msgid = 34, all = 1
timeout = -1, add_ctrls = 0, add_intermediates = 0, add_extop = 0
resp_ctrl_classes = None

def result4(self,msgid=ldap.RES_ANY,all=1,timeout=None,add_ctrls=0,add_intermediates=0,add_extop=0,resp_ctrl_classes=None):
if timeout is None:
timeout = self.timeout
> ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop)

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:774:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

args = (<built-in method result4 of LDAP object at 0x7fd165ece540>, 34, 1, -1, 0, 0, ...)
kwargs = {}

def inner(*args, **kwargs):
if name in [
'add_s',
'bind_s',
'delete_s',
'modify_s',
'modrdn_s',
'rename_s',
'sasl_interactive_bind_s',
'search_s',
'search_ext_s',
'simple_bind_s',
'unbind_s',
'getEntry',
] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'):
c_stack = inspect.stack()
frame = c_stack[1]

warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. "
"Found in: %s:%s" % (name, frame.filename, frame.lineno)))
# Later, we will add a sleep here to make it even more painful.
# Finally, it will raise an exception.
elif 'escapehatch' in kwargs:
kwargs.pop('escapehatch')

if name == 'result':
objtype, data = f(*args, **kwargs)
# data is either a 2-tuple or a list of 2-tuples
# print data
if data:
if isinstance(data, tuple):
return objtype, Entry(data)
elif isinstance(data, list):
# AD sends back these search references
# if objtype == ldap.RES_SEARCH_RESULT and \
# isinstance(data[-1],tuple) and \
# not data[-1][0]:
# print "Received search reference: "
# pprint.pprint(data[-1][1])
# data.pop() # remove the last non-entry element

return objtype, [Entry(x) for x in data]
else:
raise TypeError("unknown data type %s returned by result" %
type(data))
else:
return objtype, data
elif name.startswith('add'):
# the first arg is self
# the second and third arg are the dn and the data to send
# We need to convert the Entry into the format used by
# python-ldap
ent = args[0]
if isinstance(ent, Entry):
return f(ent.dn, ent.toTupleList(), *args[2:])
else:
return f(*args, **kwargs)
else:
> return f(*args, **kwargs)

/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd165dae880>
func = <built-in method result4 of LDAP object at 0x7fd165ece540>
args = (34, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None
exc_type = None, exc_value = None, exc_traceback = None

def _ldap_call(self,func,*args,**kwargs):
"""
Wrapper method mainly for serializing calls into OpenLDAP libs
and trace logs
"""
self._ldap_object_lock.acquire()
if __debug__:
if self._trace_level>=1:
self._trace_file.write('*** %s %s - %s\n%s\n' % (
repr(self),
self._uri,
'.'.join((self.__class__.__name__,func.__name__)),
pprint.pformat((args,kwargs))
))
if self._trace_level>=9:
traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file)
diagnostic_message_success = None
try:
try:
result = func(*args,**kwargs)
if __debug__ and self._trace_level>=2:
if func.__name__!="unbind_ext":
diagnostic_message_success = self._l.get_option(ldap.OPT_DIAGNOSTIC_MESSAGE)
finally:
self._ldap_object_lock.release()
except LDAPError as e:
exc_type,exc_value,exc_traceback = sys.exc_info()
try:
if 'info' not in e.args[0] and 'errno' in e.args[0]:
e.args[0]['info'] = strerror(e.args[0]['errno'])
except IndexError:
pass
if __debug__ and self._trace_level>=2:
self._trace_file.write('=> LDAPError - %s: %s\n' % (e.__class__.__name__,str(e)))
try:
> reraise(exc_type, exc_value, exc_traceback)

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:340:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

exc_type = <class 'ldap.INSUFFICIENT_ACCESS'>
exc_value = INSUFFICIENT_ACCESS({'msgtype': 103, 'msgid': 34, 'result': 50, 'desc': 'Insufficient access', 'ctrls': [], 'info': "Insufficient 'write' privilege to the 'userPassword' attribute of entry 'uid=user3,dc=example,dc=com'.\n"})
exc_traceback = <traceback object at 0x7fd165a28f00>

def reraise(exc_type, exc_value, exc_traceback):
"""Re-raise an exception given information from sys.exc_info()

Note that unlike six.reraise, this does not support replacing the
traceback. All arguments must come from a single sys.exc_info() call.
"""
# In Python 3, all exception info is contained in one object.
> raise exc_value

/usr/local/lib64/python3.8/site-packages/ldap/compat.py:46:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd165dae880>
func = <built-in method result4 of LDAP object at 0x7fd165ece540>
args = (34, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None
exc_type = None, exc_value = None, exc_traceback = None

def _ldap_call(self,func,*args,**kwargs):
"""
Wrapper method mainly for serializing calls into OpenLDAP libs
and trace logs
"""
self._ldap_object_lock.acquire()
if __debug__:
if self._trace_level>=1:
self._trace_file.write('*** %s %s - %s\n%s\n' % (
repr(self),
self._uri,
'.'.join((self.__class__.__name__,func.__name__)),
pprint.pformat((args,kwargs))
))
if self._trace_level>=9:
traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file)
diagnostic_message_success = None
try:
try:
> result = func(*args,**kwargs)
E ldap.INSUFFICIENT_ACCESS: {'msgtype': 103, 'msgid': 34, 'result': 50, 'desc': 'Insufficient access', 'ctrls': [], 'info': "Insufficient 'write' privilege to the 'userPassword' attribute of entry 'uid=user3,dc=example,dc=com'.\n"}

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:324: INSUFFICIENT_ACCESS

During handling of the above exception, another exception occurred:

topology_st = <lib389.topologies.TopologyMain object at 0x7fd165daed60>

def test_ticket548_test_subtree_policy(topology_st):
"""
Check shadowAccount with subtree level password policy
"""

log.info("Case 3. Check shadowAccount with subtree level password policy")

log.info("Bind as %s" % DN_DM)
topology_st.standalone.simple_bind_s(DN_DM, PASSWORD)
# Check the global policy values

set_subtree_pwpolicy(topology_st, 2, 20, 6)

log.info('Add an entry' + USER3_DN)
try:
topology_st.standalone.add_s(
Entry((USER3_DN, {'objectclass': "top person organizationalPerson inetOrgPerson shadowAccount".split(),
'sn': '3',
'cn': 'user 3',
'uid': 'user3',
'givenname': 'user',
'mail': 'user3@' + DEFAULT_SUFFIX,
'userpassword': USER_PW})))
except ldap.LDAPError as e:
log.fatal('test_ticket548: Failed to add user' + USER3_DN + ': error ' + e.message['desc'])
assert False

log.info('Search entry %s' % USER3_DN)
entry0 = topology_st.standalone.getEntry(USER3_DN, ldap.SCOPE_BASE, "(objectclass=*)")

log.info('Expecting shadowLastChange 0 since passwordMustChange is on')
check_shadow_attr_value(entry0, 'shadowLastChange', 0, USER3_DN)

# passwordMinAge -- 2 day
check_shadow_attr_value(entry0, 'shadowMin', 2, USER3_DN)

# passwordMaxAge -- 20 days
check_shadow_attr_value(entry0, 'shadowMax', 20, USER3_DN)

# passwordWarning -- 6 days
check_shadow_attr_value(entry0, 'shadowWarning', 6, USER3_DN)

log.info("Bind as %s" % USER3_DN)
topology_st.standalone.simple_bind_s(USER3_DN, USER_PW)

log.info('Search entry %s' % USER3_DN)
try:
entry1 = topology_st.standalone.getEntry(USER3_DN, ldap.SCOPE_BASE, "(objectclass=*)")
except ldap.UNWILLING_TO_PERFORM:
log.info('test_ticket548: Search by' + USER3_DN + ' failed by UNWILLING_TO_PERFORM as expected')
except ldap.LDAPError as e:
log.fatal('test_ticket548: Failed to search user' + USER3_DN + ' by self: error ' + e.message['desc'])
assert False

log.info("Bind as %s and updating the password with a new one" % USER3_DN)
topology_st.standalone.simple_bind_s(USER3_DN, USER_PW)

# Bind as DM again, change policy
log.info("Bind as %s" % DN_DM)
topology_st.standalone.simple_bind_s(DN_DM, PASSWORD)

set_subtree_pwpolicy(topology_st, 4, 40, 12)

newpasswd = USER_PW + '0'
> update_passwd(topology_st, USER3_DN, USER_PW, newpasswd)

/export/tests/tickets/ticket548_test.py:372:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

topology_st = <lib389.topologies.TopologyMain object at 0x7fd165daed60>
user = 'uid=user3,dc=example,dc=com', passwd = 'password'
newpasswd = 'password0'

def update_passwd(topology_st, user, passwd, newpasswd):
log.info(" Bind as {%s,%s}" % (user, passwd))
topology_st.standalone.simple_bind_s(user, passwd)
try:
topology_st.standalone.modify_s(user, [(ldap.MOD_REPLACE, 'userpassword', newpasswd.encode())])
except ldap.LDAPError as e:
> log.fatal('test_ticket548: Failed to update the password ' + cpw + ' of user ' + user + ': error ' + e.message[
'desc'])
E NameError: name 'cpw' is not defined

/export/tests/tickets/ticket548_test.py:162: NameError
XFailed suites/acl/syntax_test.py::test_aci_invalid_syntax_fail[test_targattrfilters_18] 0.01
topo = <lib389.topologies.TopologyMain object at 0x7fd179bfca60>
real_value = '(target = ldap:///cn=Jeff Vedder,ou=Product Development,dc=example,dc=com)(targetattr="*")(version 3.0; acl "Name of ...3123123123123123123123123123123123123123123123123123123123123123123123123123123123123123123123123123123123123123123";)'

@pytest.mark.xfail(reason='https://bugzilla.redhat.com/show_bug.cgi?id=1691473')
@pytest.mark.parametrize("real_value", [a[1] for a in FAILED],
ids=[a[0] for a in FAILED])
def test_aci_invalid_syntax_fail(topo, real_value):
"""

Try to set wrong ACI syntax.

:id: 83c40784-fff5-49c8-9535-7064c9c19e7e
:parametrized: yes
:setup: Standalone Instance
:steps:
1. Create ACI
2. Try to setup the ACI with Instance
:expectedresults:
1. It should pass
2. It should not pass
"""
domain = Domain(topo.standalone, DEFAULT_SUFFIX)
with pytest.raises(ldap.INVALID_SYNTAX):
> domain.add("aci", real_value)
E Failed: DID NOT RAISE <class 'ldap.INVALID_SYNTAX'>

suites/acl/syntax_test.py:213: Failed
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
XFailed suites/acl/syntax_test.py::test_aci_invalid_syntax_fail[test_targattrfilters_20] 0.03
topo = <lib389.topologies.TopologyMain object at 0x7fd179bfca60>
real_value = '(target = ldap:///cn=Jeff Vedder,ou=Product Development,dc=example,dc=com)(targetattr="*")(version 3.0; acl "Name of the ACI"; deny(write)userdns="ldap:///anyone";)'

@pytest.mark.xfail(reason='https://bugzilla.redhat.com/show_bug.cgi?id=1691473')
@pytest.mark.parametrize("real_value", [a[1] for a in FAILED],
ids=[a[0] for a in FAILED])
def test_aci_invalid_syntax_fail(topo, real_value):
"""

Try to set wrong ACI syntax.

:id: 83c40784-fff5-49c8-9535-7064c9c19e7e
:parametrized: yes
:setup: Standalone Instance
:steps:
1. Create ACI
2. Try to setup the ACI with Instance
:expectedresults:
1. It should pass
2. It should not pass
"""
domain = Domain(topo.standalone, DEFAULT_SUFFIX)
with pytest.raises(ldap.INVALID_SYNTAX):
> domain.add("aci", real_value)
E Failed: DID NOT RAISE <class 'ldap.INVALID_SYNTAX'>

suites/acl/syntax_test.py:213: Failed
XFailed suites/acl/syntax_test.py::test_aci_invalid_syntax_fail[test_bind_rule_set_with_more_than_three] 0.01
topo = <lib389.topologies.TopologyMain object at 0x7fd179bfca60>
real_value = '(target = ldap:///dc=example,dc=com)(targetattr="*")(version 3.0; acl "Name of the ACI"; deny absolute (all)userdn="ldap:////////anyone";)'

@pytest.mark.xfail(reason='https://bugzilla.redhat.com/show_bug.cgi?id=1691473')
@pytest.mark.parametrize("real_value", [a[1] for a in FAILED],
ids=[a[0] for a in FAILED])
def test_aci_invalid_syntax_fail(topo, real_value):
"""

Try to set wrong ACI syntax.

:id: 83c40784-fff5-49c8-9535-7064c9c19e7e
:parametrized: yes
:setup: Standalone Instance
:steps:
1. Create ACI
2. Try to setup the ACI with Instance
:expectedresults:
1. It should pass
2. It should not pass
"""
domain = Domain(topo.standalone, DEFAULT_SUFFIX)
with pytest.raises(ldap.INVALID_SYNTAX):
> domain.add("aci", real_value)
E Failed: DID NOT RAISE <class 'ldap.INVALID_SYNTAX'>

suites/acl/syntax_test.py:213: Failed
XFailed suites/acl/userattr_test.py::test_mod_see_also_positive[(LEVEL_3, CHILDREN)] 0.06
topo = <lib389.topologies.TopologyMain object at 0x7fd1786a8070>
_add_user = None, user = 'uid=Grandparent,ou=Inheritance,dc=example,dc=com'
entry = 'ou=CHILDREN,ou=PARENTS,ou=GRANDPARENTS,ou=ANCESTORS,ou=Inheritance,dc=example,dc=com'

@pytest.mark.parametrize("user,entry", [
(CAN, ROLEDNACCESS),
(CAN, USERDNACCESS),
(CAN, GROUPDNACCESS),
(CAN, LDAPURLACCESS),
(CAN, ATTRNAMEACCESS),
(LEVEL_0, OU_2),
(LEVEL_1, ANCESTORS),
(LEVEL_2, GRANDPARENTS),
(LEVEL_4, OU_2),
(LEVEL_4, ANCESTORS),
(LEVEL_4, GRANDPARENTS),
(LEVEL_4, PARENTS),
(LEVEL_4, CHILDREN),
pytest.param(LEVEL_3, CHILDREN, marks=pytest.mark.xfail(reason="May be some bug")),
], ids=[
"(CAN,ROLEDNACCESS)",
"(CAN,USERDNACCESS)",
"(CAN,GROUPDNACCESS)",
"(CAN,LDAPURLACCESS)",
"(CAN,ATTRNAMEACCESS)",
"(LEVEL_0, OU_2)",
"(LEVEL_1,ANCESTORS)",
"(LEVEL_2,GRANDPARENTS)",
"(LEVEL_4,OU_2)",
"(LEVEL_4, ANCESTORS)",
"(LEVEL_4,GRANDPARENTS)",
"(LEVEL_4,PARENTS)",
"(LEVEL_4,CHILDREN)",
"(LEVEL_3, CHILDREN)"
])
def test_mod_see_also_positive(topo, _add_user, user, entry):
"""
Try to set seeAlso on entry with binding specific user, it will success
as per the ACI.

:id: 65745426-7a01-11e8-8ac2-8c16451d917b
:parametrized: yes
:setup: Standalone Instance
:steps:
1. Add test entry
2. Add ACI
3. User should follow ACI role
:expectedresults:
1. Entry should be added
2. Operation should succeed
3. Operation should succeed
"""
conn = UserAccount(topo.standalone, user).bind(PW_DM)
> UserAccount(conn, entry).replace('seeAlso', 'cn=1')

suites/acl/userattr_test.py:216:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/usr/local/lib/python3.8/site-packages/lib389/_mapped_object.py:280: in replace
self.set(key, value, action=ldap.MOD_REPLACE)
/usr/local/lib/python3.8/site-packages/lib389/_mapped_object.py:446: in set
return self._instance.modify_ext_s(self._dn, [(action, key, value)],
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:613: in modify_ext_s
resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout)
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:764: in result3
resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4(
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:774: in result4
ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop)
/usr/local/lib/python3.8/site-packages/lib389/__init__.py:180: in inner
return f(*args, **kwargs)
/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:340: in _ldap_call
reraise(exc_type, exc_value, exc_traceback)
/usr/local/lib64/python3.8/site-packages/ldap/compat.py:46: in reraise
raise exc_value
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7fd179b26a60>
func = <built-in method result4 of LDAP object at 0x7fd179b48c30>
args = (5, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None
exc_type = None, exc_value = None, exc_traceback = None

def _ldap_call(self,func,*args,**kwargs):
"""
Wrapper method mainly for serializing calls into OpenLDAP libs
and trace logs
"""
self._ldap_object_lock.acquire()
if __debug__:
if self._trace_level>=1:
self._trace_file.write('*** %s %s - %s\n%s\n' % (
repr(self),
self._uri,
'.'.join((self.__class__.__name__,func.__name__)),
pprint.pformat((args,kwargs))
))
if self._trace_level>=9:
traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file)
diagnostic_message_success = None
try:
try:
> result = func(*args,**kwargs)
E ldap.INSUFFICIENT_ACCESS: {'msgtype': 103, 'msgid': 5, 'result': 50, 'desc': 'Insufficient access', 'ctrls': [], 'info': "Insufficient 'write' privilege to the 'seeAlso' attribute of entry 'ou=children,ou=parents,ou=grandparents,ou=ancestors,ou=inheritance,dc=example,dc=com'.\n"}

/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py:324: INSUFFICIENT_ACCESS
XFailed suites/config/config_test.py::test_defaultnamingcontext_1 0.31
topo = <lib389.topologies.TopologyMain object at 0x7fd178201100>

@pytest.mark.xfail(reason="This may fail due to bug 1610234")
def test_defaultnamingcontext_1(topo):
"""This test case should be part of function test_defaultnamingcontext
Please move it back after we have a fix for bug 1610234
"""
log.info("Remove the original suffix which is currently nsslapd-defaultnamingcontext"
"and check nsslapd-defaultnamingcontext become empty.")

""" Please remove these declarations after moving the test
to function test_defaultnamingcontext
"""
backends = Backends(topo.standalone)
test_db2 = 'test2_db'
test_suffix2 = 'dc=test2,dc=com'
b2 = backends.create(properties={'cn': test_db2,
'nsslapd-suffix': test_suffix2})
b2.delete()
> assert topo.standalone.config.get_attr_val_utf8('nsslapd-defaultnamingcontext') == ' '
E AssertionError: assert 'dc=example,dc=com' == ' '
E Strings contain only whitespace, escaping them using repr()
E - ' '
E + 'dc=example,dc=com'

suites/config/config_test.py:280: AssertionError
-------------------------------Captured log call--------------------------------
INFO  tests.suites.config.config_test:config_test.py:268 Remove the original suffix which is currently nsslapd-defaultnamingcontextand check nsslapd-defaultnamingcontext become empty.
XFailed suites/export/export_test.py::test_dbtasks_db2ldif_with_non_accessible_ldif_file_path_output 3.63
topo = <lib389.topologies.TopologyMain object at 0x7fd166135610>

@pytest.mark.bz1860291
@pytest.mark.xfail(reason="bug 1860291")
@pytest.mark.skipif(ds_is_older("1.3.10", "1.4.2"), reason="Not implemented")
def test_dbtasks_db2ldif_with_non_accessible_ldif_file_path_output(topo):
"""Export with db2ldif, giving a ldif file path which can't be accessed by the user (dirsrv by default)

:id: fcc63387-e650-40a7-b643-baa68c190037
:setup: Standalone Instance - entries imported in the db
:steps:
1. Stop the server
2. Launch db2ldif with a non accessible ldif file path
3. check the error reported in the command output
:expected results:
1. Operation successful
2. Operation properly fails
3. A clear error message is reported as output of the cli
"""
export_ldif = '/tmp/nonexistent/export.ldif'

log.info("Stopping the instance...")
topo.standalone.stop()

log.info("Performing an offline export to a non accessible ldif file path - should fail and output a clear error message")
expected_output="No such file or directory"
> run_db2ldif_and_clear_logs(topo, topo.standalone, DEFAULT_BENAME, export_ldif, expected_output)

suites/export/export_test.py:150:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

topology = <lib389.topologies.TopologyMain object at 0x7fd166135610>
instance = <lib389.DirSrv object at 0x7fd166135700>, backend = 'userRoot'
ldif = '/tmp/nonexistent/export.ldif', output_msg = 'No such file or directory'
encrypt = False, repl = False

def run_db2ldif_and_clear_logs(topology, instance, backend, ldif, output_msg, encrypt=False, repl=False):
args = FakeArgs()
args.instance = instance.serverid
args.backend = backend
args.encrypted = encrypt
args.replication = repl
args.ldif = ldif

dbtasks_db2ldif(instance, topology.logcap.log, args)

log.info('checking output msg')
if not topology.logcap.contains(output_msg):
log.error('The output message is not the expected one')
> assert False
E assert False

suites/export/export_test.py:36: AssertionError
------------------------------Captured stderr call------------------------------
ldiffile: /tmp/nonexistent/export.ldif
-------------------------------Captured log call--------------------------------
INFO  lib389.utils:export_test.py:145 Stopping the instance... INFO  lib389.utils:export_test.py:148 Performing an offline export to a non accessible ldif file path - should fail and output a clear error message CRITICAL LogCapture:dbtasks.py:40 db2ldif failed INFO  lib389.utils:export_test.py:33 checking output msg ERROR  lib389.utils:export_test.py:35 The output message is not the expected one
XFailed suites/healthcheck/healthcheck_test.py::test_healthcheck_unable_to_query_backend 1.47
topology_st = <lib389.topologies.TopologyMain object at 0x7fd1647a6790>

@pytest.mark.ds50873
@pytest.mark.bz1796343
@pytest.mark.skipif(ds_is_older("1.4.1"), reason="Not implemented")
@pytest.mark.xfail(reason="Will fail because of bz1837315. Set proper version after bug is fixed")
def test_healthcheck_unable_to_query_backend(topology_st):
"""Check if HealthCheck returns DSBLE0002 code

:id: 716b1ff1-94bd-4780-98b8-96ff8ef21e30
:setup: Standalone instance
:steps:
1. Create DS instance
2. Create a new root suffix and database
3. Disable new suffix
4. Use HealthCheck without --json option
5. Use HealthCheck with --json option
:expectedresults:
1. Success
2. Success
3. Success
4. HealthCheck should return code DSBLE0002
5. HealthCheck should return code DSBLE0002
"""

RET_CODE = 'DSBLE0002'
NEW_SUFFIX = 'dc=test,dc=com'
NEW_BACKEND = 'userData'

standalone = topology_st.standalone

log.info('Create new suffix')
backends = Backends(standalone)
backends.create(properties={
'cn': NEW_BACKEND,
'nsslapd-suffix': NEW_SUFFIX,
})

log.info('Disable the newly created suffix')
mts = MappingTrees(standalone)
mt_new = mts.get(NEW_SUFFIX)
mt_new.replace('nsslapd-state', 'disabled')

run_healthcheck_and_flush_log(topology_st, standalone, RET_CODE, json=False)
run_healthcheck_and_flush_log(topology_st, standalone, RET_CODE, json=True)

log.info('Enable the suffix again and check if nothing is broken')
mt_new.replace('nsslapd-state', 'backend')
> run_healthcheck_and_flush_log(topology_st, standalone, RET_CODE, json=False)

suites/healthcheck/healthcheck_test.py:453:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

topology = <lib389.topologies.TopologyMain object at 0x7fd1647a6790>
instance = <lib389.DirSrv object at 0x7fd174313130>, searched_code = 'DSBLE0002'
json = False, searched_code2 = None, list_checks = False, list_errors = False
check = None, searched_list = None

def run_healthcheck_and_flush_log(topology, instance, searched_code=None, json=False, searched_code2=None,
list_checks=False, list_errors=False, check=None, searched_list=None):
args = FakeArgs()
args.instance = instance.serverid
args.verbose = instance.verbose
args.list_errors = list_errors
args.list_checks = list_checks
args.check = check
args.dry_run = False
args.json = json

log.info('Use healthcheck with --json == {} option'.format(json))
health_check_run(instance, topology.logcap.log, args)

if searched_list is not None:
for item in searched_list:
assert topology.logcap.contains(item)
log.info('Healthcheck returned searched item: %s' % item)
else:
> assert topology.logcap.contains(searched_code)
E AssertionError: assert False
E + where False = <bound method LogCapture.contains of <LogCapture (NOTSET)>>('DSBLE0002')
E + where <bound method LogCapture.contains of <LogCapture (NOTSET)>> = <LogCapture (NOTSET)>.contains
E + where <LogCapture (NOTSET)> = <lib389.topologies.TopologyMain object at 0x7fd1647a6790>.logcap

suites/healthcheck/healthcheck_test.py:49: AssertionError
-------------------------------Captured log call--------------------------------
INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking config:hr_timestamp ... INFO  LogCapture:health.py:99 Checking config:passwordscheme ... INFO  LogCapture:health.py:99 Checking backends:userdata:cl_trimming ... INFO  LogCapture:health.py:99 Checking backends:userdata:mappingtree ... INFO  LogCapture:health.py:99 Checking backends:userdata:search ... INFO  LogCapture:health.py:99 Checking backends:userdata:virt_attrs ... INFO  LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO  LogCapture:health.py:99 Checking backends:userroot:mappingtree ... INFO  LogCapture:health.py:99 Checking backends:userroot:search ... INFO  LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... INFO  LogCapture:health.py:99 Checking encryption:check_tls_version ... INFO  LogCapture:health.py:99 Checking fschecks:file_perms ... INFO  LogCapture:health.py:99 Checking refint:attr_indexes ... INFO  LogCapture:health.py:99 Checking refint:update_delay ... INFO  LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... INFO  LogCapture:health.py:99 Checking replication:agmts_status ... INFO  LogCapture:health.py:99 Checking replication:conflicts ... INFO  LogCapture:health.py:99 Checking dseldif:nsstate ... INFO  LogCapture:health.py:99 Checking tls:certificate_expiration ... INFO  LogCapture:health.py:99 Checking logs:notes ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:119 4 Issues found! Generating report ... 
INFO  LogCapture:health.py:45 [1] DS Lint Error: DSBLE0001 INFO  LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO  LogCapture:health.py:47 Severity: MEDIUM INFO  LogCapture:health.py:49 Check: backends:userdata:mappingtree INFO  LogCapture:health.py:50 Affects: INFO  LogCapture:health.py:52 -- userdata INFO  LogCapture:health.py:53 Details: INFO  LogCapture:health.py:54 ----------- INFO  LogCapture:health.py:55 This backend may be missing the correct mapping tree references. Mapping Trees allow the directory server to determine which backend an operation is routed to in the abscence of other information. This is extremely important for correct functioning of LDAP ADD for example. A correct Mapping tree for this backend must contain the suffix name, the database name and be a backend type. IE: cn=o3Dexample,cn=mapping tree,cn=config cn: o=example nsslapd-backend: userRoot nsslapd-state: backend objectClass: top objectClass: extensibleObject objectClass: nsMappingTree INFO  LogCapture:health.py:56 Resolution: INFO  LogCapture:health.py:57 ----------- INFO  LogCapture:health.py:58 Either you need to create the mapping tree, or you need to repair the related mapping tree. You will need to do this by hand by editing cn=config, or stopping the instance and editing dse.ldif. INFO  LogCapture:health.py:45 [2] DS Lint Error: DSBLE0002 INFO  LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO  LogCapture:health.py:47 Severity: HIGH INFO  LogCapture:health.py:49 Check: backends:userdata:search INFO  LogCapture:health.py:50 Affects: INFO  LogCapture:health.py:52 -- dc=test,dc=com INFO  LogCapture:health.py:53 Details: INFO  LogCapture:health.py:54 ----------- INFO  LogCapture:health.py:55 Unable to query the backend. 
LDAP error ({'msgtype': 101, 'msgid': 26, 'result': 1, 'desc': 'Operations error', 'ctrls': [], 'info': 'Warning: Operation attempted on a disabled node : dc=example,dc=com\n'}) INFO  LogCapture:health.py:56 Resolution: INFO  LogCapture:health.py:57 ----------- INFO  LogCapture:health.py:58 Check the server's error and access logs for more information. INFO  LogCapture:health.py:45 [3] DS Lint Error: DSBLE0001 INFO  LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO  LogCapture:health.py:47 Severity: MEDIUM INFO  LogCapture:health.py:49 Check: backends:userdata:mappingtree INFO  LogCapture:health.py:50 Affects: INFO  LogCapture:health.py:52 -- userdata INFO  LogCapture:health.py:53 Details: INFO  LogCapture:health.py:54 ----------- INFO  LogCapture:health.py:55 This backend may be missing the correct mapping tree references. Mapping Trees allow the directory server to determine which backend an operation is routed to in the abscence of other information. This is extremely important for correct functioning of LDAP ADD for example. A correct Mapping tree for this backend must contain the suffix name, the database name and be a backend type. IE: cn=o3Dexample,cn=mapping tree,cn=config cn: o=example nsslapd-backend: userRoot nsslapd-state: backend objectClass: top objectClass: extensibleObject objectClass: nsMappingTree INFO  LogCapture:health.py:56 Resolution: INFO  LogCapture:health.py:57 ----------- INFO  LogCapture:health.py:58 Either you need to create the mapping tree, or you need to repair the related mapping tree. You will need to do this by hand by editing cn=config, or stopping the instance and editing dse.ldif. 
INFO  LogCapture:health.py:45 [4] DS Lint Error: DSBLE0002 INFO  LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO  LogCapture:health.py:47 Severity: HIGH INFO  LogCapture:health.py:49 Check: backends:userdata:search INFO  LogCapture:health.py:50 Affects: INFO  LogCapture:health.py:52 -- dc=test,dc=com INFO  LogCapture:health.py:53 Details: INFO  LogCapture:health.py:54 ----------- INFO  LogCapture:health.py:55 Unable to query the backend. LDAP error ({'msgtype': 101, 'msgid': 26, 'result': 1, 'desc': 'Operations error', 'ctrls': [], 'info': 'Warning: Operation attempted on a disabled node : dc=example,dc=com\n'}) INFO  LogCapture:health.py:56 Resolution: INFO  LogCapture:health.py:57 ----------- INFO  LogCapture:health.py:58 Check the server's error and access logs for more information. INFO  LogCapture:health.py:124 ===== End Of Report (4 Issues found) ===== INFO  LogCapture:health.py:126 [ { "dsle": "DSBLE0001", "severity": "MEDIUM", "description": "Possibly incorrect mapping tree.", "items": [ "userdata" ], "detail": "This backend may be missing the correct mapping tree references. Mapping Trees allow\nthe directory server to determine which backend an operation is routed to in the\nabscence of other information. This is extremely important for correct functioning\nof LDAP ADD for example.\n\nA correct Mapping tree for this backend must contain the suffix name, the database name\nand be a backend type. IE:\n\ncn=o3Dexample,cn=mapping tree,cn=config\ncn: o=example\nnsslapd-backend: userRoot\nnsslapd-state: backend\nobjectClass: top\nobjectClass: extensibleObject\nobjectClass: nsMappingTree\n\n", "fix": "Either you need to create the mapping tree, or you need to repair the related\nmapping tree. 
You will need to do this by hand by editing cn=config, or stopping\nthe instance and editing dse.ldif.\n", "check": "backends:userdata:mappingtree" }, { "dsle": "DSBLE0002", "severity": "HIGH", "description": "Unable to query backend.", "items": [ "dc=test,dc=com" ], "detail": "Unable to query the backend. LDAP error ({'msgtype': 101, 'msgid': 26, 'result': 1, 'desc': 'Operations error', 'ctrls': [], 'info': 'Warning: Operation attempted on a disabled node : dc=example,dc=com\\n'})", "fix": "Check the server's error and access logs for more information.", "check": "backends:userdata:search" }, { "dsle": "DSBLE0001", "severity": "MEDIUM", "description": "Possibly incorrect mapping tree.", "items": [ "userdata" ], "detail": "This backend may be missing the correct mapping tree references. Mapping Trees allow\nthe directory server to determine which backend an operation is routed to in the\nabscence of other information. This is extremely important for correct functioning\nof LDAP ADD for example.\n\nA correct Mapping tree for this backend must contain the suffix name, the database name\nand be a backend type. IE:\n\ncn=o3Dexample,cn=mapping tree,cn=config\ncn: o=example\nnsslapd-backend: userRoot\nnsslapd-state: backend\nobjectClass: top\nobjectClass: extensibleObject\nobjectClass: nsMappingTree\n\n", "fix": "Either you need to create the mapping tree, or you need to repair the related\nmapping tree. You will need to do this by hand by editing cn=config, or stopping\nthe instance and editing dse.ldif.\n", "check": "backends:userdata:mappingtree" }, { "dsle": "DSBLE0002", "severity": "HIGH", "description": "Unable to query backend.", "items": [ "dc=test,dc=com" ], "detail": "Unable to query the backend. 
LDAP error ({'msgtype': 101, 'msgid': 26, 'result': 1, 'desc': 'Operations error', 'ctrls': [], 'info': 'Warning: Operation attempted on a disabled node : dc=example,dc=com\\n'})", "fix": "Check the server's error and access logs for more information.", "check": "backends:userdata:search" } ] INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking config:hr_timestamp ... INFO  LogCapture:health.py:99 Checking config:passwordscheme ... INFO  LogCapture:health.py:99 Checking backends:userdata:cl_trimming ... INFO  LogCapture:health.py:99 Checking backends:userdata:mappingtree ... INFO  LogCapture:health.py:99 Checking backends:userdata:search ... INFO  LogCapture:health.py:99 Checking backends:userdata:virt_attrs ... INFO  LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO  LogCapture:health.py:99 Checking backends:userroot:mappingtree ... INFO  LogCapture:health.py:99 Checking backends:userroot:search ... INFO  LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... INFO  LogCapture:health.py:99 Checking encryption:check_tls_version ... INFO  LogCapture:health.py:99 Checking fschecks:file_perms ... INFO  LogCapture:health.py:99 Checking refint:attr_indexes ... INFO  LogCapture:health.py:99 Checking refint:update_delay ... INFO  LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... INFO  LogCapture:health.py:99 Checking replication:agmts_status ... INFO  LogCapture:health.py:99 Checking replication:conflicts ... INFO  LogCapture:health.py:99 Checking dseldif:nsstate ... INFO  LogCapture:health.py:99 Checking tls:certificate_expiration ... INFO  LogCapture:health.py:99 Checking logs:notes ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:119 2 Issues found! Generating report ... 
INFO  LogCapture:health.py:45 [1] DS Lint Error: DSBLE0003 INFO  LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO  LogCapture:health.py:47 Severity: LOW INFO  LogCapture:health.py:49 Check: backends:userdata:search INFO  LogCapture:health.py:50 Affects: INFO  LogCapture:health.py:52 -- dc=test,dc=com INFO  LogCapture:health.py:53 Details: INFO  LogCapture:health.py:54 ----------- INFO  LogCapture:health.py:55 The backend database has not been initialized yet INFO  LogCapture:health.py:56 Resolution: INFO  LogCapture:health.py:57 ----------- INFO  LogCapture:health.py:58 You need to import an LDIF file, or create the suffix entry, in order to initialize the database. INFO  LogCapture:health.py:45 [2] DS Lint Error: DSBLE0003 INFO  LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO  LogCapture:health.py:47 Severity: LOW INFO  LogCapture:health.py:49 Check: backends:userdata:search INFO  LogCapture:health.py:50 Affects: INFO  LogCapture:health.py:52 -- dc=test,dc=com INFO  LogCapture:health.py:53 Details: INFO  LogCapture:health.py:54 ----------- INFO  LogCapture:health.py:55 The backend database has not been initialized yet INFO  LogCapture:health.py:56 Resolution: INFO  LogCapture:health.py:57 ----------- INFO  LogCapture:health.py:58 You need to import an LDIF file, or create the suffix entry, in order to initialize the database. INFO  LogCapture:health.py:124 ===== End Of Report (2 Issues found) =====
XFailed suites/replication/conflict_resolve_test.py::TestTwoMasters::test_memberof_groups 0.00
self = <tests.suites.replication.conflict_resolve_test.TestTwoMasters object at 0x7fd1762ed580>
topology_m2 = <lib389.topologies.TopologyMain object at 0x7fd17a953460>
base_m2 = <lib389.idm.nscontainer.nsContainer object at 0x7fd176318820>

def test_memberof_groups(self, topology_m2, base_m2):
"""Check that conflict properly resolved for operations
with memberOf and groups

:id: 77f09b18-03d1-45da-940b-1ad2c2908eb3
:setup: Two master replication, test container for entries, enable plugin logging,
audit log, error log for replica and access log for internal
:steps:
1. Enable memberOf plugin
2. Add 30 users to m1 and wait for replication to happen
3. Pause replication
4. Create a group on m1 and m2
5. Create a group on m1 and m2, delete from m1
6. Create a group on m1, delete from m1, and create on m2,
7. Create a group on m2 and m1, delete from m1
8. Create two different groups on m2
9. Resume replication
10. Check that the entries on both masters are the same and replication is working
:expectedresults:
1. It should pass
2. It should pass
3. It should pass
4. It should pass
5. It should pass
6. It should pass
7. It should pass
8. It should pass
9. It should pass
10. It should pass
"""

> pytest.xfail("Issue 49591 - work in progress")
E _pytest.outcomes.XFailed: Issue 49591 - work in progress

suites/replication/conflict_resolve_test.py:402: XFailed
XFailed suites/replication/conflict_resolve_test.py::TestTwoMasters::test_managed_entries 0.00
self = <tests.suites.replication.conflict_resolve_test.TestTwoMasters object at 0x7fd175716850>
topology_m2 = <lib389.topologies.TopologyMain object at 0x7fd17a953460>

def test_managed_entries(self, topology_m2):
"""Check that conflict properly resolved for operations
with managed entries

:id: 77f09b18-03d1-45da-940b-1ad2c2908eb4
:setup: Two master replication, test container for entries, enable plugin logging,
audit log, error log for replica and access log for internal
:steps:
1. Create ou=managed_users and ou=managed_groups under test container
2. Configure managed entries plugin and add a template to test container
3. Add a user to m1 and wait for replication to happen
4. Pause replication
5. Create a user on m1 and m2 with a same group ID on both master
6. Create a user on m1 and m2 with a different group ID on both master
7. Resume replication
8. Check that the entries on both masters are the same and replication is working
:expectedresults:
1. It should pass
2. It should pass
3. It should pass
4. It should pass
5. It should pass
6. It should pass
7. It should pass
8. It should pass
"""

> pytest.xfail("Issue 49591 - work in progress")
E _pytest.outcomes.XFailed: Issue 49591 - work in progress

suites/replication/conflict_resolve_test.py:493: XFailed
XFailed suites/replication/conflict_resolve_test.py::TestTwoMasters::test_nested_entries_with_children 0.00
self = <tests.suites.replication.conflict_resolve_test.TestTwoMasters object at 0x7fd1763203d0>
topology_m2 = <lib389.topologies.TopologyMain object at 0x7fd17a953460>
base_m2 = <lib389.idm.nscontainer.nsContainer object at 0x7fd176320610>

def test_nested_entries_with_children(self, topology_m2, base_m2):
"""Check that conflict properly resolved for operations
with nested entries with children

:id: 77f09b18-03d1-45da-940b-1ad2c2908eb5
:setup: Two master replication, test container for entries, enable plugin logging,
audit log, error log for replica and access log for internal
:steps:
1. Add 15 containers to m1 and wait for replication to happen
2. Pause replication
3. Create parent-child on master2 and master1
4. Create parent-child on master1 and master2
5. Create parent-child on master1 and master2 different child rdn
6. Create parent-child on master1 and delete parent on master2
7. Create parent on master1, delete it and parent-child on master2, delete them
8. Create parent on master1, delete it and parent-two children on master2
9. Create parent-two children on master1 and parent-child on master2, delete them
10. Create three subsets inside existing container entry, applying only part of changes on m2
11. Create more combinations of the subset with parent-child on m1 and parent on m2
12. Delete container on m1, modify user1 on m1, create parent on m2 and modify user2 on m2
13. Resume replication
14. Check that the entries on both masters are the same and replication is working
:expectedresults:
1. It should pass
2. It should pass
3. It should pass
4. It should pass
5. It should pass
6. It should pass
7. It should pass
8. It should pass
9. It should pass
10. It should pass
11. It should pass
12. It should pass
13. It should pass
14. It should pass
"""

> pytest.xfail("Issue 49591 - work in progress")
E _pytest.outcomes.XFailed: Issue 49591 - work in progress

suites/replication/conflict_resolve_test.py:584: XFailed
XFailed suites/replication/conflict_resolve_test.py::TestThreeMasters::test_nested_entries 0.00
self = <tests.suites.replication.conflict_resolve_test.TestThreeMasters object at 0x7fd176306040>
topology_m3 = <lib389.topologies.TopologyMain object at 0x7fd1762f3460>
base_m3 = <lib389.idm.nscontainer.nsContainer object at 0x7fd1740ebb50>

def test_nested_entries(self, topology_m3, base_m3):
"""Check that conflict properly resolved for operations
with nested entries with children

:id: 77f09b18-03d1-45da-940b-1ad2c2908eb6
:setup: Three master replication, test container for entries, enable plugin logging,
audit log, error log for replica and access log for internal
:steps:
1. Add 15 containers to m1 and wait for replication to happen
2. Pause replication
3. Create two child entries under each of two entries
4. Create three child entries under each of three entries
5. Create two parents on m1 and m2, then on m1 - create a child and delete one parent,
on m2 - delete one parent and create a child
6. Test a few more parent-child combinations with three instances
7. Resume replication
8. Check that the entries on both masters are the same and replication is working
:expectedresults:
1. It should pass
2. It should pass
3. It should pass
4. It should pass
5. It should pass
6. It should pass
7. It should pass
8. It should pass
"""

> pytest.xfail("Issue 49591 - work in progress")
E _pytest.outcomes.XFailed: Issue 49591 - work in progress

suites/replication/conflict_resolve_test.py:968: XFailed
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master3 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39003, 'ldap-secureport': 63703, 'server-id': 'master3', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... 
INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect da5f53bc-788f-4ccc-8d75-a45a9fd284d1 / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 686f7016-e3e7-4608-9903-b3c07c14630a / got description=da5f53bc-788f-4ccc-8d75-a45a9fd284d1) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:156 Joining master master3 to master1 ... 
INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect dc0ee8d6-e1c0-4c35-aec6-56f634168ec1 / got description=686f7016-e3e7-4608-9903-b3c07c14630a) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect b5857d52-c5b1-4a70-a6b8-a572a2b04566 / got description=dc0ee8d6-e1c0-4c35-aec6-56f634168ec1) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect b5857d52-c5b1-4a70-a6b8-a572a2b04566 / got description=dc0ee8d6-e1c0-4c35-aec6-56f634168ec1) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect b5857d52-c5b1-4a70-a6b8-a572a2b04566 / got description=dc0ee8d6-e1c0-4c35-aec6-56f634168ec1) INFO  
lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master3 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master3 ... INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is was created INFO  lib389.topologies:topologies.py:164 Ensuring master master3 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master3 to master2 ... 
INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created
XFailed suites/replication/replica_config_test.py::test_agmt_num_add[nsds5ReplicaPort-0-65535-9999999999999999999999999999999999999999999999999999999999999999999-invalid-389] 0.08
topo = <lib389.topologies.TopologyMain object at 0x7fd1740d3130>
attr = 'nsds5ReplicaPort', too_small = '0', too_big = '65535'
overflow = '9999999999999999999999999999999999999999999999999999999999999999999'
notnum = 'invalid', valid = '389'

@pytest.mark.xfail(reason="Agreement validation currently does not work.")
@pytest.mark.parametrize("attr, too_small, too_big, overflow, notnum, valid", agmt_attrs)
def test_agmt_num_add(topo, attr, too_small, too_big, overflow, notnum, valid):
"""Test all the number values you can set for a replica config entry

:id: a8b47d4a-a089-4d70-8070-e6181209bf94
:parametrized: yes
:setup: standalone instance
:steps:
1. Use a value that is too small
2. Use a value that is too big
3. Use a value that overflows the int
4. Use a value with character value (not a number)
5. Use a valid value
:expectedresults:
1. Add is rejected
2. Add is rejected
3. Add is rejected
4. Add is rejected
5. Add is allowed
"""

agmt_reset(topo)
replica = replica_setup(topo)

agmts = Agreements(topo.standalone, basedn=replica.dn)

# Test too small
perform_invalid_create(agmts, agmt_dict, attr, too_small)
# Test too big
> perform_invalid_create(agmts, agmt_dict, attr, too_big)

suites/replication/replica_config_test.py:217:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

many = <lib389.agreement.Agreements object at 0x7fd1740fc2b0>
properties = {'cn': 'test_agreement', 'nsDS5ReplicaBindDN': 'uid=tester', 'nsDS5ReplicaBindMethod': 'SIMPLE', 'nsDS5ReplicaHost': 'localhost.localdomain', ...}
attr = 'nsds5ReplicaPort', value = '65535'

def perform_invalid_create(many, properties, attr, value):
my_properties = copy.deepcopy(properties)
my_properties[attr] = value
with pytest.raises(ldap.LDAPError) as ei:
> many.create(properties=my_properties)
E Failed: DID NOT RAISE <class 'ldap.LDAPError'>

suites/replication/replica_config_test.py:108: Failed
XFailed suites/replication/replica_config_test.py::test_agmt_num_modify[nsds5ReplicaPort-0-65535-9999999999999999999999999999999999999999999999999999999999999999999-invalid-389] 0.17
topo = <lib389.topologies.TopologyMain object at 0x7fd1740d3130>
attr = 'nsds5ReplicaPort', too_small = '0', too_big = '65535'
overflow = '9999999999999999999999999999999999999999999999999999999999999999999'
notnum = 'invalid', valid = '389'

@pytest.mark.xfail(reason="Agreement validation currently does not work.")
@pytest.mark.parametrize("attr, too_small, too_big, overflow, notnum, valid", agmt_attrs)
def test_agmt_num_modify(topo, attr, too_small, too_big, overflow, notnum, valid):
"""Test all the number values you can set for a replica config entry

:id: a8b47d4a-a089-4d70-8070-e6181209bf95
:parametrized: yes
:setup: standalone instance
:steps:
1. Replace a value that is too small
2. Replace a value that is too big
3. Replace a value that overflows the int
4. Replace a value with character value (not a number)
5. Replace a value with a valid value
:expectedresults:
1. Value is rejected
2. Value is rejected
3. Value is rejected
4. Value is rejected
5. Value is allowed
"""

agmt = agmt_setup(topo)

# Value too small
> perform_invalid_modify(agmt, attr, too_small)

suites/replication/replica_config_test.py:253:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

o = <lib389.agreement.Agreement object at 0x7fd17482f5b0>
attr = 'nsds5ReplicaPort', value = '0'

def perform_invalid_modify(o, attr, value):
with pytest.raises(ldap.LDAPError) as ei:
> o.replace(attr, value)
E Failed: DID NOT RAISE <class 'ldap.LDAPError'>

suites/replication/replica_config_test.py:113: Failed
XFailed suites/replication/replica_config_test.py::test_agmt_num_modify[nsds5ReplicaTimeout--1-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-6] 0.42
topo = <lib389.topologies.TopologyMain object at 0x7fd1740d3130>
attr = 'nsds5ReplicaTimeout', too_small = '-1', too_big = '9223372036854775807'
overflow = '9999999999999999999999999999999999999999999999999999999999999999999'
notnum = 'invalid', valid = '6'

@pytest.mark.xfail(reason="Agreement validation currently does not work.")
@pytest.mark.parametrize("attr, too_small, too_big, overflow, notnum, valid", agmt_attrs)
def test_agmt_num_modify(topo, attr, too_small, too_big, overflow, notnum, valid):
"""Test all the number values you can set for a replica config entry

:id: a8b47d4a-a089-4d70-8070-e6181209bf95
:parametrized: yes
:setup: standalone instance
:steps:
1. Replace a value that is too small
2. Replace a value that is too big
3. Replace a value that overflows the int
4. Replace a value with character value (not a number)
5. Replace a value with a valid value
:expectedresults:
1. Value is rejected
2. Value is rejected
3. Value is rejected
4. Value is rejected
5. Value is allowed
"""

agmt = agmt_setup(topo)

# Value too small
perform_invalid_modify(agmt, attr, too_small)
# Value too big
> perform_invalid_modify(agmt, attr, too_big)

suites/replication/replica_config_test.py:255:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

o = <lib389.agreement.Agreement object at 0x7fd174099ac0>
attr = 'nsds5ReplicaTimeout', value = '9223372036854775807'

def perform_invalid_modify(o, attr, value):
with pytest.raises(ldap.LDAPError) as ei:
> o.replace(attr, value)
E Failed: DID NOT RAISE <class 'ldap.LDAPError'>

suites/replication/replica_config_test.py:113: Failed
XFailed suites/replication/replica_config_test.py::test_agmt_num_modify[nsds5ReplicaBusyWaitTime--1-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-6] 0.18
topo = <lib389.topologies.TopologyMain object at 0x7fd1740d3130>
attr = 'nsds5ReplicaBusyWaitTime', too_small = '-1'
too_big = '9223372036854775807'
overflow = '9999999999999999999999999999999999999999999999999999999999999999999'
notnum = 'invalid', valid = '6'

@pytest.mark.xfail(reason="Agreement validation currently does not work.")
@pytest.mark.parametrize("attr, too_small, too_big, overflow, notnum, valid", agmt_attrs)
def test_agmt_num_modify(topo, attr, too_small, too_big, overflow, notnum, valid):
"""Test all the number values you can set for a replica config entry

:id: a8b47d4a-a089-4d70-8070-e6181209bf95
:parametrized: yes
:setup: standalone instance
:steps:
1. Replace a value that is too small
2. Replace a value that is too big
3. Replace a value that overflows the int
4. Replace a value with character value (not a number)
5. Replace a value with a valid value
:expectedresults:
1. Value is rejected
2. Value is rejected
3. Value is rejected
4. Value is rejected
5. Value is allowed
"""

agmt = agmt_setup(topo)

# Value too small
perform_invalid_modify(agmt, attr, too_small)
# Value too big
> perform_invalid_modify(agmt, attr, too_big)

suites/replication/replica_config_test.py:255:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

o = <lib389.agreement.Agreement object at 0x7fd174079340>
attr = 'nsds5ReplicaBusyWaitTime', value = '9223372036854775807'

def perform_invalid_modify(o, attr, value):
with pytest.raises(ldap.LDAPError) as ei:
> o.replace(attr, value)
E Failed: DID NOT RAISE <class 'ldap.LDAPError'>

suites/replication/replica_config_test.py:113: Failed
XFailed suites/replication/replica_config_test.py::test_agmt_num_modify[nsds5ReplicaSessionPauseTime--1-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-6] 0.17
topo = <lib389.topologies.TopologyMain object at 0x7fd1740d3130>
attr = 'nsds5ReplicaSessionPauseTime', too_small = '-1'
too_big = '9223372036854775807'
overflow = '9999999999999999999999999999999999999999999999999999999999999999999'
notnum = 'invalid', valid = '6'

@pytest.mark.xfail(reason="Agreement validation currently does not work.")
@pytest.mark.parametrize("attr, too_small, too_big, overflow, notnum, valid", agmt_attrs)
def test_agmt_num_modify(topo, attr, too_small, too_big, overflow, notnum, valid):
"""Test all the number values you can set for a replica config entry

:id: a8b47d4a-a089-4d70-8070-e6181209bf95
:parametrized: yes
:setup: standalone instance
:steps:
1. Replace a value that is too small
2. Replace a value that is too big
3. Replace a value that overflows the int
4. Replace a value with character value (not a number)
5. Replace a vlue with a valid value
:expectedresults:
1. Value is rejected
2. Value is rejected
3. Value is rejected
4. Value is rejected
5. Value is allowed
"""

agmt = agmt_setup(topo)

# Value too small
perform_invalid_modify(agmt, attr, too_small)
# Value too big
> perform_invalid_modify(agmt, attr, too_big)

suites/replication/replica_config_test.py:255:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

o = <lib389.agreement.Agreement object at 0x7fd174076130>
attr = 'nsds5ReplicaSessionPauseTime', value = '9223372036854775807'

def perform_invalid_modify(o, attr, value):
with pytest.raises(ldap.LDAPError) as ei:
> o.replace(attr, value)
E Failed: DID NOT RAISE <class 'ldap.LDAPError'>

suites/replication/replica_config_test.py:113: Failed
XFailed suites/replication/replica_config_test.py::test_agmt_num_modify[nsds5ReplicaFlowControlWindow--1-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-6] 0.18
topo = <lib389.topologies.TopologyMain object at 0x7fd1740d3130>
attr = 'nsds5ReplicaFlowControlWindow', too_small = '-1'
too_big = '9223372036854775807'
overflow = '9999999999999999999999999999999999999999999999999999999999999999999'
notnum = 'invalid', valid = '6'

@pytest.mark.xfail(reason="Agreement validation current does not work.")
@pytest.mark.parametrize("attr, too_small, too_big, overflow, notnum, valid", agmt_attrs)
def test_agmt_num_modify(topo, attr, too_small, too_big, overflow, notnum, valid):
"""Test all the number values you can set for a replica config entry

:id: a8b47d4a-a089-4d70-8070-e6181209bf95
:parametrized: yes
:setup: standalone instance
:steps:
1. Replace a value that is too small
2. Replace a value that is too big
3. Replace a value that overflows the int
4. Replace a value with character value (not a number)
5. Replace a vlue with a valid value
:expectedresults:
1. Value is rejected
2. Value is rejected
3. Value is rejected
4. Value is rejected
5. Value is allowed
"""

agmt = agmt_setup(topo)

# Value too small
perform_invalid_modify(agmt, attr, too_small)
# Value too big
> perform_invalid_modify(agmt, attr, too_big)

suites/replication/replica_config_test.py:255:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

o = <lib389.agreement.Agreement object at 0x7fd174072e20>
attr = 'nsds5ReplicaFlowControlWindow', value = '9223372036854775807'

def perform_invalid_modify(o, attr, value):
with pytest.raises(ldap.LDAPError) as ei:
> o.replace(attr, value)
E Failed: DID NOT RAISE <class 'ldap.LDAPError'>

suites/replication/replica_config_test.py:113: Failed
XFailed suites/replication/replica_config_test.py::test_agmt_num_modify[nsds5ReplicaFlowControlPause--1-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-6] 0.18
topo = <lib389.topologies.TopologyMain object at 0x7fd1740d3130>
attr = 'nsds5ReplicaFlowControlPause', too_small = '-1'
too_big = '9223372036854775807'
overflow = '9999999999999999999999999999999999999999999999999999999999999999999'
notnum = 'invalid', valid = '6'

@pytest.mark.xfail(reason="Agreement validation current does not work.")
@pytest.mark.parametrize("attr, too_small, too_big, overflow, notnum, valid", agmt_attrs)
def test_agmt_num_modify(topo, attr, too_small, too_big, overflow, notnum, valid):
"""Test all the number values you can set for a replica config entry

:id: a8b47d4a-a089-4d70-8070-e6181209bf95
:parametrized: yes
:setup: standalone instance
:steps:
1. Replace a value that is too small
2. Replace a value that is too big
3. Replace a value that overflows the int
4. Replace a value with character value (not a number)
5. Replace a vlue with a valid value
:expectedresults:
1. Value is rejected
2. Value is rejected
3. Value is rejected
4. Value is rejected
5. Value is allowed
"""

agmt = agmt_setup(topo)

# Value too small
perform_invalid_modify(agmt, attr, too_small)
# Value too big
> perform_invalid_modify(agmt, attr, too_big)

suites/replication/replica_config_test.py:255:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

o = <lib389.agreement.Agreement object at 0x7fd1740a1cd0>
attr = 'nsds5ReplicaFlowControlPause', value = '9223372036854775807'

def perform_invalid_modify(o, attr, value):
with pytest.raises(ldap.LDAPError) as ei:
> o.replace(attr, value)
E Failed: DID NOT RAISE <class 'ldap.LDAPError'>

suites/replication/replica_config_test.py:113: Failed
XFailed suites/replication/replica_config_test.py::test_agmt_num_modify[nsds5ReplicaProtocolTimeout--1-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-6] 0.17
topo = <lib389.topologies.TopologyMain object at 0x7fd1740d3130>
attr = 'nsds5ReplicaProtocolTimeout', too_small = '-1'
too_big = '9223372036854775807'
overflow = '9999999999999999999999999999999999999999999999999999999999999999999'
notnum = 'invalid', valid = '6'

@pytest.mark.xfail(reason="Agreement validation current does not work.")
@pytest.mark.parametrize("attr, too_small, too_big, overflow, notnum, valid", agmt_attrs)
def test_agmt_num_modify(topo, attr, too_small, too_big, overflow, notnum, valid):
"""Test all the number values you can set for a replica config entry

:id: a8b47d4a-a089-4d70-8070-e6181209bf95
:parametrized: yes
:setup: standalone instance
:steps:
1. Replace a value that is too small
2. Replace a value that is too big
3. Replace a value that overflows the int
4. Replace a value with character value (not a number)
5. Replace a vlue with a valid value
:expectedresults:
1. Value is rejected
2. Value is rejected
3. Value is rejected
4. Value is rejected
5. Value is allowed
"""

agmt = agmt_setup(topo)

# Value too small
perform_invalid_modify(agmt, attr, too_small)
# Value too big
> perform_invalid_modify(agmt, attr, too_big)

suites/replication/replica_config_test.py:255:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

o = <lib389.agreement.Agreement object at 0x7fd1740a6640>
attr = 'nsds5ReplicaProtocolTimeout', value = '9223372036854775807'

def perform_invalid_modify(o, attr, value):
with pytest.raises(ldap.LDAPError) as ei:
> o.replace(attr, value)
E Failed: DID NOT RAISE <class 'ldap.LDAPError'>

suites/replication/replica_config_test.py:113: Failed
XFailed suites/replication/ruvstore_test.py::test_memoryruv_sync_with_databaseruv 0.04
topo = <lib389.topologies.TopologyMain object at 0x7fd17aaf2220>

@pytest.mark.xfail(reason="No method to safety access DB ruv currently exists online.")
def test_memoryruv_sync_with_databaseruv(topo):
"""Check if memory ruv and database ruv are synced

:id: 5f38ac5f-6353-460d-bf60-49cafffda5b3
:setup: Replication with two masters.
:steps: 1. Add user to server and compare memory ruv and database ruv.
2. Modify description of user and compare memory ruv and database ruv.
3. Modrdn of user and compare memory ruv and database ruv.
4. Delete user and compare memory ruv and database ruv.
:expectedresults:
1. For add user, the memory ruv and database ruv should be the same.
2. For modify operation, the memory ruv and database ruv should be the same.
3. For modrdn operation, the memory ruv and database ruv should be the same.
4. For delete operation, the memory ruv and database ruv should be the same.
"""

log.info('Adding user: {} to master1'.format(TEST_ENTRY_NAME))
users = UserAccounts(topo.ms['master1'], DEFAULT_SUFFIX)
tuser = users.create(properties=USER_PROPERTIES)
> _compare_memoryruv_and_databaseruv(topo, 'add')

suites/replication/ruvstore_test.py:139:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

topo = <lib389.topologies.TopologyMain object at 0x7fd17aaf2220>
operation_type = 'add'

def _compare_memoryruv_and_databaseruv(topo, operation_type):
"""Compare the memoryruv and databaseruv for ldap operations"""

log.info('Checking memory ruv for ldap: {} operation'.format(operation_type))
replicas = Replicas(topo.ms['master1'])
replica = replicas.list()[0]
memory_ruv = replica.get_attr_val_utf8('nsds50ruv')

log.info('Checking database ruv for ldap: {} operation'.format(operation_type))
> entry = replicas.get_ruv_entry(DEFAULT_SUFFIX)
E AttributeError: 'Replicas' object has no attribute 'get_ruv_entry'

suites/replication/ruvstore_test.py:81: AttributeError
-------------------------------Captured log call--------------------------------
INFO  tests.suites.replication.ruvstore_test:ruvstore_test.py:136 Adding user: rep2lusr to master1 INFO  tests.suites.replication.ruvstore_test:ruvstore_test.py:75 Checking memory ruv for ldap: add operation INFO  tests.suites.replication.ruvstore_test:ruvstore_test.py:80 Checking database ruv for ldap: add operation
XPassed suites/acl/syntax_test.py::test_aci_invalid_syntax_fail[test_Use_double_equal_instead_of_equal_in_the_targetattr] 0.19
No log output captured.
XPassed suites/acl/syntax_test.py::test_aci_invalid_syntax_fail[test_Use_double_equal_instead_of_equal_in_the_targetfilter] 0.02
No log output captured.
XPassed suites/replication/replica_config_test.py::test_agmt_num_add[nsds5ReplicaTimeout--1-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-6] 0.20
No log output captured.
XPassed suites/replication/replica_config_test.py::test_agmt_num_add[nsds5ReplicaBusyWaitTime--1-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-6] 0.21
No log output captured.
XPassed suites/replication/replica_config_test.py::test_agmt_num_add[nsds5ReplicaSessionPauseTime--1-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-6] 0.20
No log output captured.
XPassed suites/replication/replica_config_test.py::test_agmt_num_add[nsds5ReplicaFlowControlWindow--1-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-6] 0.21
No log output captured.
XPassed suites/replication/replica_config_test.py::test_agmt_num_add[nsds5ReplicaFlowControlPause--1-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-6] 0.46
No log output captured.
XPassed suites/replication/replica_config_test.py::test_agmt_num_add[nsds5ReplicaProtocolTimeout--1-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-6] 0.20
No log output captured.
Skipped suites/auth_token/basic_auth_test.py::test_ldap_auth_token_config::setup 0.00
('suites/auth_token/basic_auth_test.py', 28, 'Skipped: Auth tokens are not available in older versions')
Skipped suites/auth_token/basic_auth_test.py::test_ldap_auth_token_nsuser::setup 0.00
('suites/auth_token/basic_auth_test.py', 75, 'Skipped: Auth tokens are not available in older versions')
Skipped suites/auth_token/basic_auth_test.py::test_ldap_auth_token_disabled::setup 0.00
('suites/auth_token/basic_auth_test.py', 144, 'Skipped: Auth tokens are not available in older versions')
Skipped suites/auth_token/basic_auth_test.py::test_ldap_auth_token_directory_manager::setup 0.00
('suites/auth_token/basic_auth_test.py', 194, 'Skipped: Auth tokens are not available in older versions')
Skipped suites/auth_token/basic_auth_test.py::test_ldap_auth_token_anonymous::setup 0.00
('suites/auth_token/basic_auth_test.py', 217, 'Skipped: Auth tokens are not available in older versions')
Skipped suites/config/regression_test.py::test_set_cachememsize_to_custom_value::setup 0.00
('suites/config/regression_test.py', 34, 'Skipped: available memory is too low')
Skipped suites/ds_logs/ds_logs_test.py::test_etime_at_border_of_second::setup 0.00
('suites/ds_logs/ds_logs_test.py', 735, 'Skipped: rsearch was removed')
Skipped suites/entryuuid/basic_test.py::test_entryuuid_indexed_import_and_search::setup 0.00
('suites/entryuuid/basic_test.py', 73, 'Skipped: Entryuuid is not available in older versions')
Skipped suites/entryuuid/basic_test.py::test_entryuuid_unindexed_import_and_search::setup 0.00
('suites/entryuuid/basic_test.py', 113, 'Skipped: Entryuuid is not available in older versions')
Skipped suites/entryuuid/basic_test.py::test_entryuuid_generation_on_add::setup 0.00
('suites/entryuuid/basic_test.py', 155, 'Skipped: Entryuuid is not available in older versions')
Skipped suites/entryuuid/basic_test.py::test_entryuuid_fixup_task::setup 0.00
('suites/entryuuid/basic_test.py', 179, 'Skipped: Entryuuid is not available in older versions')
Skipped suites/memory_leaks/MMR_double_free_test.py::test_MMR_double_free::setup 0.00
('suites/memory_leaks/MMR_double_free_test.py', 67, "Skipped: Don't run if ASAN is not enabled")
Skipped suites/memory_leaks/range_search_test.py::test_range_search::setup 0.00
('suites/memory_leaks/range_search_test.py', 24, "Skipped: Don't run if ASAN is not enabled")
Skipped suites/migration/export_data_test.py::test_export_data_from_source_host::setup 0.00
('suites/migration/export_data_test.py', 24, 'Skipped: This test is meant to execute in specific test environment')
Skipped suites/migration/import_data_test.py::test_import_data_to_target_host::setup 0.00
('suites/migration/import_data_test.py', 24, 'Skipped: This test is meant to execute in specific test environment')
Skipped suites/replication/changelog_test.py::test_cldump_files_removed::setup 0.00
('suites/replication/changelog_test.py', 235, 'Skipped: does not work for prefix builds')
Skipped suites/replication/changelog_test.py::test_changelog_compactdbinterval::setup 0.00
('suites/replication/changelog_test.py', 630, 'Skipped: changelog compaction is done by the backend itself, with id2entry as well, nsslapd-changelogcompactdb-interval is no longer supported')
Skipped suites/rewriters/adfilter_test.py::test_adfilter_objectSid::setup 0.00
('suites/rewriters/adfilter_test.py', 90, 'Skipped: It is missing samba python bindings')
Skipped tickets/ticket47462_test.py::test_ticket47462::setup 0.00
('tickets/ticket47462_test.py', 39, 'Skipped: Upgrade scripts are supported only on versions < 1.4.x')
Skipped tickets/ticket47815_test.py::test_ticket47815::setup 0.00
('tickets/ticket47815_test.py', 26, 'Skipped: Not implemented, or invalid by nsMemberOf')
Skipped tickets/ticket49121_test.py::test_ticket49121::setup 0.00
('tickets/ticket49121_test.py', 32, "Skipped: Don't run if ASAN is not enabled")
Passed suites/acl/acivattr_test.py::test_positive[(ENG_USER, ENG_MANAGER, REAL_EQ_ACI)] 0.04
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/acl/acivattr_test.py::test_positive[(ENG_USER, ENG_MANAGER, REAL_PRES_ACI)] 0.04
No log output captured.
Passed suites/acl/acivattr_test.py::test_positive[(ENG_USER, ENG_MANAGER, REAL_SUB_ACI)] 0.04
No log output captured.
Passed suites/acl/acivattr_test.py::test_positive[(ENG_USER, ENG_MANAGER, ROLE_PRES_ACI)] 0.04
No log output captured.
Passed suites/acl/acivattr_test.py::test_positive[(ENG_USER, ENG_MANAGER, ROLE_SUB_ACI)] 0.04
No log output captured.
Passed suites/acl/acivattr_test.py::test_positive[(ENG_USER, ENG_MANAGER, COS_EQ_ACI)] 0.04
No log output captured.
Passed suites/acl/acivattr_test.py::test_positive[(ENG_USER, ENG_MANAGER, COS_PRES_ACI)] 0.05
No log output captured.
Passed suites/acl/acivattr_test.py::test_positive[(ENG_USER, ENG_MANAGER, COS_SUB_ACI)] 0.04
No log output captured.
Passed suites/acl/acivattr_test.py::test_positive[(ENG_USER, ENG_MANAGER, LDAPURL_ACI)] 0.04
No log output captured.
Passed suites/acl/acivattr_test.py::test_negative[(ENG_USER, SALES_MANAGER, REAL_EQ_ACI)] 0.06
No log output captured.
Passed suites/acl/acivattr_test.py::test_negative[(ENG_USER, SALES_OU, REAL_PRES_ACI)] 0.05
No log output captured.
Passed suites/acl/acivattr_test.py::test_negative[(ENG_USER, SALES_MANAGER, REAL_SUB_ACI)] 0.06
No log output captured.
Passed suites/acl/acivattr_test.py::test_negative[(ENG_USER, SALES_MANAGER, ROLE_EQ_ACI)] 0.05
No log output captured.
Passed suites/acl/acivattr_test.py::test_negative[(ENG_USER, SALES_MANAGER, ROLE_PRES_ACI)] 0.05
No log output captured.
Passed suites/acl/acivattr_test.py::test_negative[(ENG_USER, SALES_MANAGER, ROLE_SUB_ACI)] 0.05
No log output captured.
Passed suites/acl/acivattr_test.py::test_negative[(ENG_USER, SALES_MANAGER, COS_EQ_ACI)] 0.05
No log output captured.
Passed suites/acl/acivattr_test.py::test_negative[(ENG_USER, SALES_MANAGER, COS_PRES_ACI)] 0.05
No log output captured.
Passed suites/acl/acivattr_test.py::test_negative[(ENG_USER, SALES_MANAGER, COS_SUB_ACI)] 0.05
No log output captured.
Passed suites/acl/acivattr_test.py::test_negative[(SALES_UESER, SALES_MANAGER, LDAPURL_ACI)] 0.06
No log output captured.
Passed suites/acl/acivattr_test.py::test_negative[(ENG_USER, ENG_MANAGER, ROLE_EQ_ACI)] 0.05
No log output captured.
Passed suites/acl/acl_deny_test.py::test_multi_deny_aci 11.51
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389:acl_deny_test.py:47 Add uid=tuser1,ou=People,dc=example,dc=com INFO  lib389:acl_deny_test.py:58 Add uid=tuser,ou=People,dc=example,dc=com
-------------------------------Captured log call--------------------------------
INFO  lib389:acl_deny_test.py:90 Pass 1 INFO  lib389:acl_deny_test.py:93 Testing two searches behave the same... INFO  lib389:acl_deny_test.py:136 Testing search does not return any entries... INFO  lib389:acl_deny_test.py:90 Pass 2 INFO  lib389:acl_deny_test.py:93 Testing two searches behave the same... INFO  lib389:acl_deny_test.py:136 Testing search does not return any entries... INFO  lib389:acl_deny_test.py:200 Test PASSED
Passed suites/acl/acl_test.py::test_aci_attr_subtype_targetattr[lang-ja] 0.00
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 8fcce356-c75c-4309-981c-f9c0a1c86137 / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working 
(expect 2be12f15-3ff0-4c4d-9949-98221005b2cb / got description=8fcce356-c75c-4309-981c-f9c0a1c86137) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists INFO  tests.suites.acl.acl_test:acl_test.py:76 ========Executing test with 'lang-ja' subtype======== INFO  tests.suites.acl.acl_test:acl_test.py:77 Add a target attribute INFO  tests.suites.acl.acl_test:acl_test.py:80 Add a user attribute INFO  tests.suites.acl.acl_test:acl_test.py:88 Add an ACI with attribute subtype
-------------------------------Captured log call--------------------------------
INFO  tests.suites.acl.acl_test:acl_test.py:118 Search for the added attribute INFO  tests.suites.acl.acl_test:acl_test.py:125 The added attribute was found
Passed suites/acl/acl_test.py::test_aci_attr_subtype_targetattr[binary] 0.00
-------------------------------Captured log setup-------------------------------
INFO  tests.suites.acl.acl_test:acl_test.py:76 ========Executing test with 'binary' subtype======== INFO  tests.suites.acl.acl_test:acl_test.py:77 Add a target attribute INFO  tests.suites.acl.acl_test:acl_test.py:80 Add a user attribute INFO  tests.suites.acl.acl_test:acl_test.py:88 Add an ACI with attribute subtype
-------------------------------Captured log call--------------------------------
INFO  tests.suites.acl.acl_test:acl_test.py:118 Search for the added attribute INFO  tests.suites.acl.acl_test:acl_test.py:125 The added attribute was found
Passed suites/acl/acl_test.py::test_aci_attr_subtype_targetattr[phonetic] 0.00
-------------------------------Captured log setup-------------------------------
INFO  tests.suites.acl.acl_test:acl_test.py:76 ========Executing test with 'phonetic' subtype======== INFO  tests.suites.acl.acl_test:acl_test.py:77 Add a target attribute INFO  tests.suites.acl.acl_test:acl_test.py:80 Add a user attribute INFO  tests.suites.acl.acl_test:acl_test.py:88 Add an ACI with attribute subtype
-------------------------------Captured log call--------------------------------
INFO  tests.suites.acl.acl_test:acl_test.py:118 Search for the added attribute INFO  tests.suites.acl.acl_test:acl_test.py:125 The added attribute was found
Passed suites/acl/acl_test.py::test_mode_default_add_deny 0.03
-------------------------------Captured log setup-------------------------------
INFO  lib389:acl_test.py:233 ######## INITIALIZATION ######## INFO  lib389:acl_test.py:236 Add uid=bind_entry,dc=example,dc=com INFO  lib389:acl_test.py:254 Add cn=staged user,dc=example,dc=com INFO  lib389:acl_test.py:258 Add cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:262 Add cn=excepts,cn=accounts,dc=example,dc=com
-------------------------------Captured log call--------------------------------
INFO  lib389:acl_test.py:294 ######## mode moddn_aci : ADD (should fail) ######## INFO  lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO  lib389:acl_test.py:302 Try to add cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:311 Exception (expected): INSUFFICIENT_ACCESS
Passed suites/acl/acl_test.py::test_mode_default_delete_deny 0.02
-------------------------------Captured log call--------------------------------
INFO  lib389:acl_test.py:329 ######## DELETE (should fail) ######## INFO  lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO  lib389:acl_test.py:336 Try to delete cn=staged user,dc=example,dc=com INFO  lib389:acl_test.py:341 Exception (expected): INSUFFICIENT_ACCESS
Passed suites/acl/acl_test.py::test_moddn_staging_prod[0-cn=staged user,dc=example,dc=com-cn=accounts,dc=example,dc=com-False] 0.16
-------------------------------Captured log call--------------------------------
INFO  lib389:acl_test.py:376 ######## MOVE staging -> Prod (0) ######## INFO  lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO  lib389:acl_test.py:388 Try to MODDN uid=new_account0,cn=staged user,dc=example,dc=com -> uid=new_account0,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:395 Exception (expected): INSUFFICIENT_ACCESS INFO  lib389:acl_test.py:399 ######## MOVE to and from equality filter ######## INFO  lib389:acl_test.py:133 Bind as cn=Directory Manager INFO  lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO  lib389:acl_test.py:406 Try to MODDN uid=new_account0,cn=staged user,dc=example,dc=com -> uid=new_account0,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:133 Bind as cn=Directory Manager INFO  lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com
Passed suites/acl/acl_test.py::test_moddn_staging_prod[1-cn=staged user,dc=example,dc=com-cn=accounts,dc=example,dc=com-False] 0.16
-------------------------------Captured log call--------------------------------
INFO  lib389:acl_test.py:376 ######## MOVE staging -> Prod (1) ######## INFO  lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO  lib389:acl_test.py:388 Try to MODDN uid=new_account1,cn=staged user,dc=example,dc=com -> uid=new_account1,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:395 Exception (expected): INSUFFICIENT_ACCESS INFO  lib389:acl_test.py:399 ######## MOVE to and from equality filter ######## INFO  lib389:acl_test.py:133 Bind as cn=Directory Manager INFO  lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO  lib389:acl_test.py:406 Try to MODDN uid=new_account1,cn=staged user,dc=example,dc=com -> uid=new_account1,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:133 Bind as cn=Directory Manager INFO  lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com
Passed suites/acl/acl_test.py::test_moddn_staging_prod[2-cn=staged user,dc=example,dc=com-cn=bad*,dc=example,dc=com-True] 0.16
-------------------------------Captured log call--------------------------------
INFO  lib389:acl_test.py:376 ######## MOVE staging -> Prod (2) ######## INFO  lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO  lib389:acl_test.py:388 Try to MODDN uid=new_account2,cn=staged user,dc=example,dc=com -> uid=new_account2,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:395 Exception (expected): INSUFFICIENT_ACCESS INFO  lib389:acl_test.py:399 ######## MOVE to and from equality filter ######## INFO  lib389:acl_test.py:133 Bind as cn=Directory Manager INFO  lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO  lib389:acl_test.py:406 Try to MODDN uid=new_account2,cn=staged user,dc=example,dc=com -> uid=new_account2,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:409 Exception (expected): INSUFFICIENT_ACCESS INFO  lib389:acl_test.py:133 Bind as cn=Directory Manager INFO  lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com
Passed suites/acl/acl_test.py::test_moddn_staging_prod[3-cn=st*,dc=example,dc=com-cn=accounts,dc=example,dc=com-False] 0.15
-------------------------------Captured log call--------------------------------
INFO  lib389:acl_test.py:376 ######## MOVE staging -> Prod (3) ######## INFO  lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO  lib389:acl_test.py:388 Try to MODDN uid=new_account3,cn=staged user,dc=example,dc=com -> uid=new_account3,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:395 Exception (expected): INSUFFICIENT_ACCESS INFO  lib389:acl_test.py:399 ######## MOVE to and from equality filter ######## INFO  lib389:acl_test.py:133 Bind as cn=Directory Manager INFO  lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO  lib389:acl_test.py:406 Try to MODDN uid=new_account3,cn=staged user,dc=example,dc=com -> uid=new_account3,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:133 Bind as cn=Directory Manager INFO  lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com
Passed suites/acl/acl_test.py::test_moddn_staging_prod[4-cn=bad*,dc=example,dc=com-cn=accounts,dc=example,dc=com-True] 0.17
-------------------------------Captured log call--------------------------------
INFO  lib389:acl_test.py:376 ######## MOVE staging -> Prod (4) ######## INFO  lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO  lib389:acl_test.py:388 Try to MODDN uid=new_account4,cn=staged user,dc=example,dc=com -> uid=new_account4,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:395 Exception (expected): INSUFFICIENT_ACCESS INFO  lib389:acl_test.py:399 ######## MOVE to and from equality filter ######## INFO  lib389:acl_test.py:133 Bind as cn=Directory Manager INFO  lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO  lib389:acl_test.py:406 Try to MODDN uid=new_account4,cn=staged user,dc=example,dc=com -> uid=new_account4,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:409 Exception (expected): INSUFFICIENT_ACCESS INFO  lib389:acl_test.py:133 Bind as cn=Directory Manager INFO  lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com
Passed suites/acl/acl_test.py::test_moddn_staging_prod[5-cn=st*,dc=example,dc=com-cn=ac*,dc=example,dc=com-False] 0.15
-------------------------------Captured log call--------------------------------
INFO  lib389:acl_test.py:376 ######## MOVE staging -> Prod (5) ######## INFO  lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO  lib389:acl_test.py:388 Try to MODDN uid=new_account5,cn=staged user,dc=example,dc=com -> uid=new_account5,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:395 Exception (expected): INSUFFICIENT_ACCESS INFO  lib389:acl_test.py:399 ######## MOVE to and from equality filter ######## INFO  lib389:acl_test.py:133 Bind as cn=Directory Manager INFO  lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO  lib389:acl_test.py:406 Try to MODDN uid=new_account5,cn=staged user,dc=example,dc=com -> uid=new_account5,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:133 Bind as cn=Directory Manager INFO  lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com
Passed suites/acl/acl_test.py::test_moddn_staging_prod[6-None-cn=ac*,dc=example,dc=com-False] 0.15
-------------------------------Captured log call--------------------------------
INFO  lib389:acl_test.py:376 ######## MOVE staging -> Prod (6) ######## INFO  lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO  lib389:acl_test.py:388 Try to MODDN uid=new_account6,cn=staged user,dc=example,dc=com -> uid=new_account6,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:395 Exception (expected): INSUFFICIENT_ACCESS INFO  lib389:acl_test.py:399 ######## MOVE to and from equality filter ######## INFO  lib389:acl_test.py:133 Bind as cn=Directory Manager INFO  lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO  lib389:acl_test.py:406 Try to MODDN uid=new_account6,cn=staged user,dc=example,dc=com -> uid=new_account6,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:133 Bind as cn=Directory Manager INFO  lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com
Passed suites/acl/acl_test.py::test_moddn_staging_prod[7-cn=st*,dc=example,dc=com-None-False] 0.16
-------------------------------Captured log call--------------------------------
INFO  lib389:acl_test.py:376 ######## MOVE staging -> Prod (7) ######## INFO  lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO  lib389:acl_test.py:388 Try to MODDN uid=new_account7,cn=staged user,dc=example,dc=com -> uid=new_account7,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:395 Exception (expected): INSUFFICIENT_ACCESS INFO  lib389:acl_test.py:399 ######## MOVE to and from equality filter ######## INFO  lib389:acl_test.py:133 Bind as cn=Directory Manager INFO  lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO  lib389:acl_test.py:406 Try to MODDN uid=new_account7,cn=staged user,dc=example,dc=com -> uid=new_account7,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:133 Bind as cn=Directory Manager INFO  lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com
Passed suites/acl/acl_test.py::test_moddn_staging_prod[8-None-None-False] 0.38
-------------------------------Captured log call--------------------------------
INFO  lib389:acl_test.py:376 ######## MOVE staging -> Prod (8) ######## INFO  lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO  lib389:acl_test.py:388 Try to MODDN uid=new_account8,cn=staged user,dc=example,dc=com -> uid=new_account8,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:395 Exception (expected): INSUFFICIENT_ACCESS INFO  lib389:acl_test.py:399 ######## MOVE to and from equality filter ######## INFO  lib389:acl_test.py:133 Bind as cn=Directory Manager INFO  lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO  lib389:acl_test.py:406 Try to MODDN uid=new_account8,cn=staged user,dc=example,dc=com -> uid=new_account8,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:133 Bind as cn=Directory Manager INFO  lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com
Passed suites/acl/acl_test.py::test_moddn_staging_prod_9 0.66
-------------------------------Captured log call--------------------------------
INFO  lib389:acl_test.py:453 ######## MOVE staging -> Prod (9) ######## INFO  lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO  lib389:acl_test.py:466 Try to MODDN uid=new_account9,cn=staged user,dc=example,dc=com -> uid=new_account9,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:473 Exception (expected): INSUFFICIENT_ACCESS INFO  lib389:acl_test.py:479 Disable the moddn right INFO  lib389:acl_test.py:133 Bind as cn=Directory Manager INFO  lib389:acl_test.py:484 ######## MOVE to and from equality filter ######## INFO  lib389:acl_test.py:133 Bind as cn=Directory Manager INFO  lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO  lib389:acl_test.py:492 Try to MODDN uid=new_account9,cn=staged user,dc=example,dc=com -> uid=new_account9,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:499 Exception (expected): INSUFFICIENT_ACCESS INFO  lib389:acl_test.py:133 Bind as cn=Directory Manager INFO  lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO  lib389:acl_test.py:133 Bind as cn=Directory Manager INFO  lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO  lib389:acl_test.py:521 Try to MODDN uid=new_account9,cn=staged user,dc=example,dc=com -> uid=new_account9,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:133 Bind as cn=Directory Manager INFO  lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO  lib389:acl_test.py:532 Enable the moddn right INFO  lib389:acl_test.py:133 Bind as cn=Directory Manager INFO  lib389:acl_test.py:536 ######## MOVE staging -> Prod (10) ######## INFO  lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO  lib389:acl_test.py:548 Try to MODDN uid=new_account10,cn=staged user,dc=example,dc=com -> uid=new_account10,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:555 Exception (expected): INSUFFICIENT_ACCESS INFO  lib389:acl_test.py:133 Bind as cn=Directory Manager INFO  lib389:acl_test.py:139 Bind as 
uid=bind_entry,dc=example,dc=com INFO  lib389:acl_test.py:572 Try to MODDN uid=new_account10,cn=staged user,dc=example,dc=com -> uid=new_account10,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:579 Exception (expected): INSUFFICIENT_ACCESS INFO  lib389:acl_test.py:133 Bind as cn=Directory Manager INFO  lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO  lib389:acl_test.py:588 ######## MOVE to and from equality filter ######## INFO  lib389:acl_test.py:133 Bind as cn=Directory Manager INFO  lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO  lib389:acl_test.py:594 Try to MODDN uid=new_account10,cn=staged user,dc=example,dc=com -> uid=new_account10,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:133 Bind as cn=Directory Manager INFO  lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com
Passed suites/acl/acl_test.py::test_moddn_prod_staging 0.31
-------------------------------Captured log call--------------------------------
INFO  lib389:acl_test.py:623 ######## MOVE staging -> Prod (11) ######## INFO  lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO  lib389:acl_test.py:636 Try to MODDN uid=new_account11,cn=staged user,dc=example,dc=com -> uid=new_account11,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:643 Exception (expected): INSUFFICIENT_ACCESS INFO  lib389:acl_test.py:647 ######## MOVE to and from equality filter ######## INFO  lib389:acl_test.py:133 Bind as cn=Directory Manager INFO  lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO  lib389:acl_test.py:653 Try to MODDN uid=new_account11,cn=staged user,dc=example,dc=com -> uid=new_account11,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:133 Bind as cn=Directory Manager INFO  lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO  lib389:acl_test.py:668 Try to move back MODDN uid=new_account11,cn=accounts,dc=example,dc=com -> uid=new_account11,cn=staged user,dc=example,dc=com INFO  lib389:acl_test.py:675 Exception (expected): INSUFFICIENT_ACCESS INFO  lib389:acl_test.py:133 Bind as cn=Directory Manager INFO  lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO  lib389:acl_test.py:133 Bind as cn=Directory Manager INFO  lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com
Passed suites/acl/acl_test.py::test_check_repl_M2_to_M1 1.04
-------------------------------Captured log call--------------------------------
INFO  lib389:acl_test.py:705 Bind as cn=Directory Manager (M2) INFO  lib389:acl_test.py:725 Update (M2) uid=new_account12,cn=staged user,dc=example,dc=com (description) INFO  lib389:acl_test.py:738 Update uid=new_account12,cn=staged user,dc=example,dc=com (description) replicated on M1
Passed suites/acl/acl_test.py::test_moddn_staging_prod_except 0.22
-------------------------------Captured log call--------------------------------
INFO  lib389:acl_test.py:763 ######## MOVE staging -> Prod (13) ######## INFO  lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO  lib389:acl_test.py:775 Try to MODDN uid=new_account13,cn=staged user,dc=example,dc=com -> uid=new_account13,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:782 Exception (expected): INSUFFICIENT_ACCESS INFO  lib389:acl_test.py:786 ######## MOVE to and from equality filter ######## INFO  lib389:acl_test.py:133 Bind as cn=Directory Manager INFO  lib389:acl_test.py:160 Add a DENY aci under cn=excepts,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO  lib389:acl_test.py:793 Try to MODDN uid=new_account13,cn=staged user,dc=example,dc=com -> uid=new_account13,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:799 ######## MOVE staging -> Prod/Except (14) ######## INFO  lib389:acl_test.py:805 Try to MODDN uid=new_account14,cn=staged user,dc=example,dc=com -> uid=new_account14,cn=excepts,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:812 Exception (expected): INSUFFICIENT_ACCESS INFO  lib389:acl_test.py:133 Bind as cn=Directory Manager INFO  lib389:acl_test.py:160 Add a DENY aci under cn=excepts,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com
Passed suites/acl/acl_test.py::test_mode_default_ger_no_moddn 0.00
-------------------------------Captured log call--------------------------------
INFO  lib389:acl_test.py:839 ######## mode moddn_aci : GER no moddn ######## INFO  lib389:acl_test.py:850 dn: cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:850 dn: cn=excepts,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:850 dn: uid=new_account0,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:850 dn: uid=new_account1,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:850 dn: uid=new_account3,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:850 dn: uid=new_account5,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:850 dn: uid=new_account6,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:850 dn: uid=new_account7,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:850 dn: uid=new_account8,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:850 dn: uid=new_account9,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:850 dn: uid=new_account10,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:850 dn: uid=new_account11,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:850 dn: uid=new_account13,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:853 ######## entryLevelRights: b'v'
Passed suites/acl/acl_test.py::test_mode_default_ger_with_moddn 0.14
-------------------------------Captured log call--------------------------------
INFO  lib389:acl_test.py:877 ######## mode moddn_aci: GER with moddn ######## INFO  lib389:acl_test.py:133 Bind as cn=Directory Manager INFO  lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO  lib389:acl_test.py:895 dn: cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:895 dn: cn=excepts,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:895 dn: uid=new_account0,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:895 dn: uid=new_account1,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:895 dn: uid=new_account3,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:895 dn: uid=new_account5,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:895 dn: uid=new_account6,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:895 dn: uid=new_account7,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:895 dn: uid=new_account8,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:895 dn: uid=new_account9,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:895 dn: uid=new_account10,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:895 dn: uid=new_account11,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:895 dn: uid=new_account13,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:898 ######## entryLevelRights: b'vn' INFO  lib389:acl_test.py:133 Bind as cn=Directory Manager INFO  lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com
Passed suites/acl/acl_test.py::test_mode_legacy_ger_no_moddn1 0.04
-------------------------------Captured log call--------------------------------
INFO  lib389:acl_test.py:928 ######## Disable the moddn aci mod ######## INFO  lib389:acl_test.py:133 Bind as cn=Directory Manager INFO  lib389:acl_test.py:932 ######## mode legacy 1: GER no moddn ######## INFO  lib389:acl_test.py:942 dn: cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:942 dn: cn=excepts,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:942 dn: uid=new_account0,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:942 dn: uid=new_account1,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:942 dn: uid=new_account3,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:942 dn: uid=new_account5,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:942 dn: uid=new_account6,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:942 dn: uid=new_account7,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:942 dn: uid=new_account8,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:942 dn: uid=new_account9,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:942 dn: uid=new_account10,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:942 dn: uid=new_account11,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:942 dn: uid=new_account13,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:945 ######## entryLevelRights: b'v'
Passed suites/acl/acl_test.py::test_mode_legacy_ger_no_moddn2 0.37
-------------------------------Captured log call--------------------------------
INFO  lib389:acl_test.py:971 ######## Disable the moddn aci mod ######## INFO  lib389:acl_test.py:133 Bind as cn=Directory Manager INFO  lib389:acl_test.py:975 ######## mode legacy 2: GER no moddn ######## INFO  lib389:acl_test.py:133 Bind as cn=Directory Manager INFO  lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO  lib389:acl_test.py:992 dn: cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:992 dn: cn=excepts,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:992 dn: uid=new_account0,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:992 dn: uid=new_account1,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:992 dn: uid=new_account3,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:992 dn: uid=new_account5,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:992 dn: uid=new_account6,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:992 dn: uid=new_account7,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:992 dn: uid=new_account8,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:992 dn: uid=new_account9,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:992 dn: uid=new_account10,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:992 dn: uid=new_account11,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:992 dn: uid=new_account13,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:995 ######## entryLevelRights: b'v' INFO  lib389:acl_test.py:133 Bind as cn=Directory Manager INFO  lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com
Passed suites/acl/acl_test.py::test_mode_legacy_ger_with_moddn 0.11
-------------------------------Captured log call--------------------------------
INFO  lib389:acl_test.py:1031 ######## Disable the moddn aci mod ######## INFO  lib389:acl_test.py:133 Bind as cn=Directory Manager INFO  lib389:acl_test.py:1035 ######## mode legacy : GER with moddn ######## INFO  lib389:acl_test.py:133 Bind as cn=Directory Manager INFO  lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO  lib389:acl_test.py:1057 dn: cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:1057 dn: cn=excepts,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:1057 dn: uid=new_account0,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:1057 dn: uid=new_account1,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:1057 dn: uid=new_account3,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:1057 dn: uid=new_account5,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:1057 dn: uid=new_account6,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:1057 dn: uid=new_account7,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:1057 dn: uid=new_account8,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:1057 dn: uid=new_account9,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:1057 dn: uid=new_account10,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:1057 dn: uid=new_account11,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:1057 dn: uid=new_account13,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:1060 ######## entryLevelRights: b'vn' INFO  lib389:acl_test.py:133 Bind as cn=Directory Manager
Passed suites/acl/acl_test.py::test_rdn_write_get_ger 0.01
-------------------------------Captured log setup-------------------------------
INFO  lib389:acl_test.py:1071 ######## Add entry tuser ########
-------------------------------Captured log call--------------------------------
INFO  lib389:acl_test.py:1097 ######## GER rights for anonymous ######## INFO  lib389:acl_test.py:1107 dn: dc=example,dc=com INFO  lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO  lib389:acl_test.py:1107 dn: ou=groups,dc=example,dc=com INFO  lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO  lib389:acl_test.py:1107 dn: ou=people,dc=example,dc=com INFO  lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO  lib389:acl_test.py:1107 dn: ou=permissions,dc=example,dc=com INFO  lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO  lib389:acl_test.py:1107 dn: ou=services,dc=example,dc=com INFO  lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO  lib389:acl_test.py:1107 dn: uid=demo_user,ou=people,dc=example,dc=com INFO  lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO  lib389:acl_test.py:1107 dn: cn=demo_group,ou=groups,dc=example,dc=com INFO  lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO  lib389:acl_test.py:1107 dn: cn=group_admin,ou=permissions,dc=example,dc=com INFO  lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO  lib389:acl_test.py:1107 dn: cn=group_modify,ou=permissions,dc=example,dc=com INFO  lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO  lib389:acl_test.py:1107 dn: cn=user_admin,ou=permissions,dc=example,dc=com INFO  lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO  lib389:acl_test.py:1107 dn: cn=user_modify,ou=permissions,dc=example,dc=com INFO  lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO  lib389:acl_test.py:1107 dn: cn=user_passwd_reset,ou=permissions,dc=example,dc=com INFO  lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO  lib389:acl_test.py:1107 dn: cn=user_private_read,ou=permissions,dc=example,dc=com INFO  lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO  lib389:acl_test.py:1107 dn: cn=replication_managers,dc=example,dc=com INFO  lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO  
lib389:acl_test.py:1107 dn: cn=ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701,ou=services,dc=example,dc=com INFO  lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO  lib389:acl_test.py:1107 dn: cn=ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63702,ou=services,dc=example,dc=com INFO  lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO  lib389:acl_test.py:1107 dn: uid=bind_entry,dc=example,dc=com INFO  lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO  lib389:acl_test.py:1107 dn: cn=staged user,dc=example,dc=com INFO  lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO  lib389:acl_test.py:1107 dn: cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO  lib389:acl_test.py:1107 dn: cn=excepts,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO  lib389:acl_test.py:1107 dn: uid=new_account0,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO  lib389:acl_test.py:1107 dn: uid=new_account1,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO  lib389:acl_test.py:1107 dn: uid=new_account2,cn=staged user,dc=example,dc=com INFO  lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO  lib389:acl_test.py:1107 dn: uid=new_account3,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO  lib389:acl_test.py:1107 dn: uid=new_account4,cn=staged user,dc=example,dc=com INFO  lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO  lib389:acl_test.py:1107 dn: uid=new_account5,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO  lib389:acl_test.py:1107 dn: uid=new_account6,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO  lib389:acl_test.py:1107 dn: uid=new_account7,cn=accounts,dc=example,dc=com INFO  
lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO  lib389:acl_test.py:1107 dn: uid=new_account8,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO  lib389:acl_test.py:1107 dn: uid=new_account9,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO  lib389:acl_test.py:1107 dn: uid=new_account10,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO  lib389:acl_test.py:1107 dn: uid=new_account11,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO  lib389:acl_test.py:1107 dn: uid=new_account12,cn=staged user,dc=example,dc=com INFO  lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO  lib389:acl_test.py:1107 dn: uid=new_account13,cn=accounts,dc=example,dc=com INFO  lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO  lib389:acl_test.py:1107 dn: uid=new_account14,cn=staged user,dc=example,dc=com INFO  lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO  lib389:acl_test.py:1107 dn: uid=new_account15,cn=staged user,dc=example,dc=com INFO  lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO  lib389:acl_test.py:1107 dn: uid=new_account16,cn=staged user,dc=example,dc=com INFO  lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO  lib389:acl_test.py:1107 dn: uid=new_account17,cn=staged user,dc=example,dc=com INFO  lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO  lib389:acl_test.py:1107 dn: uid=new_account18,cn=staged user,dc=example,dc=com INFO  lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO  lib389:acl_test.py:1107 dn: uid=new_account19,cn=staged user,dc=example,dc=com INFO  lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO  lib389:acl_test.py:1107 dn: cn=tuser,dc=example,dc=com INFO  lib389:acl_test.py:1109 ######## entryLevelRights: b'v'
Passed suites/acl/acl_test.py::test_rdn_write_modrdn_anonymous 0.30
-------------------------------Captured log call--------------------------------
INFO  lib389:acl_test.py:1136 dn: INFO  lib389:acl_test.py:1138 ######## 'objectClass': [b'top'] INFO  lib389:acl_test.py:1138 ######## 'defaultnamingcontext': [b'dc=example,dc=com'] INFO  lib389:acl_test.py:1138 ######## 'dataversion': [b'020201029001639'] INFO  lib389:acl_test.py:1138 ######## 'netscapemdsuffix': [b'cn=ldap://dc=localhost,dc=localdomain:39001'] INFO  lib389:acl_test.py:1143 Exception (expected): INSUFFICIENT_ACCESS INFO  lib389:acl_test.py:1150 The entry was not renamed (expected) INFO  lib389:acl_test.py:133 Bind as cn=Directory Manager
Passed suites/acl/deladd_test.py::test_allow_delete_access_to_groupdn 0.06
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/acl/deladd_test.py::test_allow_add_access_to_anyone 0.07
No log output captured.
Passed suites/acl/deladd_test.py::test_allow_delete_access_to_anyone 0.04
No log output captured.
Passed suites/acl/deladd_test.py::test_allow_delete_access_not_to_userdn 0.06
No log output captured.
Passed suites/acl/deladd_test.py::test_allow_delete_access_not_to_group 0.07
No log output captured.
Passed suites/acl/deladd_test.py::test_allow_add_access_to_parent 0.05
No log output captured.
Passed suites/acl/deladd_test.py::test_allow_delete_access_to_parent 0.05
No log output captured.
Passed suites/acl/deladd_test.py::test_allow_delete_access_to_dynamic_group 0.05
No log output captured.
Passed suites/acl/deladd_test.py::test_allow_delete_access_to_dynamic_group_uid 0.06
No log output captured.
Passed suites/acl/deladd_test.py::test_allow_delete_access_not_to_dynamic_group 0.06
No log output captured.
Passed suites/acl/enhanced_aci_modrnd_test.py::test_enhanced_aci_modrnd 0.18
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. INFO  tests.suites.acl.enhanced_aci_modrnd_test:enhanced_aci_modrnd_test.py:32 Add a container: ou=test_ou_1,dc=example,dc=com INFO  tests.suites.acl.enhanced_aci_modrnd_test:enhanced_aci_modrnd_test.py:38 Add a container: ou=test_ou_2,dc=example,dc=com INFO  tests.suites.acl.enhanced_aci_modrnd_test:enhanced_aci_modrnd_test.py:44 Add a user: cn=test_user,ou=test_ou_1,dc=example,dc=com INFO  tests.suites.acl.enhanced_aci_modrnd_test:enhanced_aci_modrnd_test.py:58 Add an ACI 'allow (all)' by cn=test_user,ou=test_ou_1,dc=example,dc=com to the ou=test_ou_1,dc=example,dc=com INFO  tests.suites.acl.enhanced_aci_modrnd_test:enhanced_aci_modrnd_test.py:62 Add an ACI 'allow (all)' by cn=test_user,ou=test_ou_1,dc=example,dc=com to the ou=test_ou_2,dc=example,dc=com
-------------------------------Captured log call--------------------------------
INFO  tests.suites.acl.enhanced_aci_modrnd_test:enhanced_aci_modrnd_test.py:93 Bind as cn=test_user,ou=test_ou_1,dc=example,dc=com INFO  tests.suites.acl.enhanced_aci_modrnd_test:enhanced_aci_modrnd_test.py:97 User MODRDN operation from ou=test_ou_1,dc=example,dc=com to ou=test_ou_2,dc=example,dc=com INFO  tests.suites.acl.enhanced_aci_modrnd_test:enhanced_aci_modrnd_test.py:103 Check there is no user in ou=test_ou_1,dc=example,dc=com INFO  tests.suites.acl.enhanced_aci_modrnd_test:enhanced_aci_modrnd_test.py:109 Check there is our user in ou=test_ou_2,dc=example,dc=com
Passed suites/acl/globalgroup_part2_test.py::test_undefined_in_group_eval_five 0.06
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/acl/globalgroup_part2_test.py::test_undefined_in_group_eval_six 0.05
No log output captured.
Passed suites/acl/globalgroup_part2_test.py::test_undefined_in_group_eval_seven 0.03
No log output captured.
Passed suites/acl/globalgroup_part2_test.py::test_undefined_in_group_eval_eight 0.03
No log output captured.
Passed suites/acl/globalgroup_part2_test.py::test_undefined_in_group_eval_nine 0.03
No log output captured.
Passed suites/acl/globalgroup_part2_test.py::test_undefined_in_group_eval_ten 0.05
No log output captured.
Passed suites/acl/globalgroup_part2_test.py::test_undefined_in_group_eval_eleven 0.04
No log output captured.
Passed suites/acl/globalgroup_part2_test.py::test_undefined_in_group_eval_twelve 0.02
No log output captured.
Passed suites/acl/globalgroup_part2_test.py::test_undefined_in_group_eval_fourteen 0.07
No log output captured.
Passed suites/acl/globalgroup_part2_test.py::test_undefined_in_group_eval_fifteen 0.06
No log output captured.
Passed suites/acl/globalgroup_part2_test.py::test_undefined_in_group_eval_sixteen 0.03
No log output captured.
Passed suites/acl/globalgroup_part2_test.py::test_undefined_in_group_eval_seventeen 0.03
No log output captured.
Passed suites/acl/globalgroup_part2_test.py::test_undefined_in_group_eval_eighteen 0.03
No log output captured.
Passed suites/acl/globalgroup_test.py::test_caching_changes 0.08
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/acl/globalgroup_test.py::test_deny_group_member_all_rights_to_user 0.07
No log output captured.
Passed suites/acl/globalgroup_test.py::test_deny_group_member_all_rights_to_group_members 0.14
No log output captured.
Passed suites/acl/globalgroup_test.py::test_deeply_nested_groups_aci_denial 0.05
No log output captured.
Passed suites/acl/globalgroup_test.py::test_deeply_nested_groups_aci_denial_two 0.02
No log output captured.
Passed suites/acl/globalgroup_test.py::test_deeply_nested_groups_aci_allow 0.02
No log output captured.
Passed suites/acl/globalgroup_test.py::test_deeply_nested_groups_aci_allow_two 0.03
No log output captured.
Passed suites/acl/globalgroup_test.py::test_undefined_in_group_eval 0.03
No log output captured.
Passed suites/acl/globalgroup_test.py::test_undefined_in_group_eval_two 0.02
No log output captured.
Passed suites/acl/globalgroup_test.py::test_undefined_in_group_eval_three 0.02
No log output captured.
Passed suites/acl/globalgroup_test.py::test_undefined_in_group_eval_four 0.06
No log output captured.
Passed suites/acl/keywords_part2_test.py::test_ip_keyword_test_noip_cannot 0.09
No log output captured.
Passed suites/acl/keywords_part2_test.py::test_user_can_access_the_data_at_any_time 0.08
No log output captured.
Passed suites/acl/keywords_part2_test.py::test_user_can_access_the_data_only_in_the_morning 0.12
No log output captured.
Passed suites/acl/keywords_part2_test.py::test_user_can_access_the_data_only_in_the_afternoon 0.07
No log output captured.
Passed suites/acl/keywords_part2_test.py::test_timeofday_keyword 1.15
No log output captured.
Passed suites/acl/keywords_part2_test.py::test_dayofweek_keyword_test_everyday_can_access 0.09
No log output captured.
Passed suites/acl/keywords_part2_test.py::test_dayofweek_keyword_today_can_access 0.10
No log output captured.
Passed suites/acl/keywords_part2_test.py::test_user_cannot_access_the_data_at_all 0.10
No log output captured.
Passed suites/acl/keywords_test.py::test_user_binds_with_a_password_and_can_access_the_data 0.05
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/acl/keywords_test.py::test_user_binds_with_a_bad_password_and_cannot_access_the_data 0.01
No log output captured.
Passed suites/acl/keywords_test.py::test_anonymous_user_cannot_access_the_data 0.03
No log output captured.
Passed suites/acl/keywords_test.py::test_authenticated_but_has_no_rigth_on_the_data 0.05
No log output captured.
Passed suites/acl/keywords_test.py::test_the_bind_client_is_accessing_the_directory 0.01
No log output captured.
Passed suites/acl/keywords_test.py::test_users_binds_with_a_password_and_can_access_the_data 0.02
No log output captured.
Passed suites/acl/keywords_test.py::test_user_binds_without_any_password_and_cannot_access_the_data 0.02
No log output captured.
Passed suites/acl/keywords_test.py::test_user_can_access_the_data_when_connecting_from_any_machine 0.04
No log output captured.
Passed suites/acl/keywords_test.py::test_user_can_access_the_data_when_connecting_from_internal_ds_network_only 0.05
No log output captured.
Passed suites/acl/keywords_test.py::test_user_can_access_the_data_when_connecting_from_some_network_only 0.04
No log output captured.
Passed suites/acl/keywords_test.py::test_from_an_unauthorized_network 0.03
No log output captured.
Passed suites/acl/keywords_test.py::test_user_cannot_access_the_data_when_connecting_from_an_unauthorized_network_2 0.02
No log output captured.
Passed suites/acl/keywords_test.py::test_user_cannot_access_the_data_if_not_from_a_certain_domain 0.05
No log output captured.
Passed suites/acl/keywords_test.py::test_dnsalias_keyword_test_nodns_cannot 0.20
No log output captured.
Passed suites/acl/keywords_test.py::test_user_can_access_from_ipv4_or_ipv6_address[127.0.0.1] 0.04
No log output captured.
Passed suites/acl/keywords_test.py::test_user_can_access_from_ipv4_or_ipv6_address[[::1]] 0.01
No log output captured.
Passed suites/acl/misc_test.py::test_accept_aci_in_addition_to_acl 0.34
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/acl/misc_test.py::test_more_then_40_acl_will_crash_slapd 0.58
-------------------------------Captured log setup-------------------------------
INFO  lib389:misc_test.py:76 Exception (expected): ALREADY_EXISTS
Passed suites/acl/misc_test.py::test_search_access_should_not_include_read_access 0.01
-------------------------------Captured log setup-------------------------------
INFO  lib389:misc_test.py:76 Exception (expected): ALREADY_EXISTS
Passed suites/acl/misc_test.py::test_only_allow_some_targetattr 0.06
-------------------------------Captured log setup-------------------------------
INFO  lib389:misc_test.py:76 Exception (expected): ALREADY_EXISTS
Passed suites/acl/misc_test.py::test_only_allow_some_targetattr_two 0.34
-------------------------------Captured log setup-------------------------------
INFO  lib389:misc_test.py:76 Exception (expected): ALREADY_EXISTS
Passed suites/acl/misc_test.py::test_memberurl_needs_to_be_normalized 0.11
-------------------------------Captured log setup-------------------------------
INFO  lib389:misc_test.py:76 Exception (expected): ALREADY_EXISTS
Passed suites/acl/misc_test.py::test_greater_than_200_acls_can_be_created 3.98
-------------------------------Captured log setup-------------------------------
INFO  lib389:misc_test.py:76 Exception (expected): ALREADY_EXISTS
Passed suites/acl/misc_test.py::test_server_bahaves_properly_with_very_long_attribute_names 0.06
-------------------------------Captured log setup-------------------------------
INFO  lib389:misc_test.py:76 Exception (expected): ALREADY_EXISTS
Passed suites/acl/misc_test.py::test_do_bind_as_201_distinct_users 139.27
-------------------------------Captured log setup-------------------------------
INFO  lib389:misc_test.py:76 Exception (expected): ALREADY_EXISTS
Passed suites/acl/modify_test.py::test_allow_write_access_to_targetattr_with_a_single_attribute 0.32
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/acl/modify_test.py::test_allow_write_access_to_targetattr_with_multiple_attibutes 0.06
No log output captured.
Passed suites/acl/modify_test.py::test_allow_write_access_to_userdn_all 0.11
No log output captured.
Passed suites/acl/modify_test.py::test_allow_write_access_to_userdn_with_wildcards_in_dn 0.05
No log output captured.
Passed suites/acl/modify_test.py::test_allow_write_access_to_userdn_with_multiple_dns 0.18
No log output captured.
Passed suites/acl/modify_test.py::test_allow_write_access_to_target_with_wildcards 0.18
No log output captured.
Passed suites/acl/modify_test.py::test_allow_write_access_to_userdnattr 0.09
No log output captured.
Passed suites/acl/modify_test.py::test_allow_selfwrite_access_to_anyone 0.31
No log output captured.
Passed suites/acl/modify_test.py::test_uniquemember_should_also_be_the_owner 0.22
No log output captured.
Passed suites/acl/modify_test.py::test_aci_with_both_allow_and_deny 0.16
No log output captured.
Passed suites/acl/modify_test.py::test_allow_owner_to_modify_entry 0.09
No log output captured.
Passed suites/acl/modrdn_test.py::test_allow_write_privilege_to_anyone 0.02
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/acl/modrdn_test.py::test_allow_write_privilege_to_dynamic_group_with_scope_set_to_base_in_ldap_url 0.02
No log output captured.
Passed suites/acl/modrdn_test.py::test_write_access_to_naming_atributes 0.04
No log output captured.
Passed suites/acl/modrdn_test.py::test_write_access_to_naming_atributes_two 0.09
No log output captured.
Passed suites/acl/modrdn_test.py::test_access_aci_list_contains_any_deny_rule 0.12
No log output captured.
Passed suites/acl/modrdn_test.py::test_renaming_target_entry 0.06
No log output captured.
Passed suites/acl/repeated_ldap_add_test.py::test_repeated_ldap_add 32.68
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
------------------------------Captured stdout call------------------------------
Entry uid=buser123,ou=BOU,dc=example,dc=com is locked
-------------------------------Captured log call--------------------------------
INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:184 Testing Bug 1347760 - Information disclosure via repeated use of LDAP ADD operation, etc. INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:186 Disabling accesslog logbuffering INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:189 Bind as {cn=Directory Manager,password} INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:192 Adding ou=BOU a bind user belongs to. INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:197 Adding a bind user. INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:204 Adding a test user. INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:211 Deleting aci in dc=example,dc=com. INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:214 While binding as DM, acquire an access log path and instance dir INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:220 Bind case 1. the bind user has no rights to read the entry itself, bind should be successful. INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:221 Bind as {uid=buser123,ou=BOU,dc=example,dc=com,buser123} who has no access rights. INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:229 Access log path: /var/log/dirsrv/slapd-standalone1/access INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:231 Bind case 2-1. the bind user does not exist, bind should fail with error INVALID_CREDENTIALS INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:233 Bind as {uid=bogus,dc=example,dc=com,bogus} who does not exist. 
INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:237 Exception (expected): INVALID_CREDENTIALS INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:238 Desc Invalid credentials INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:246 Cause found - [28/Oct/2020:20:21:44.556486419 -0400] conn=1 op=11 RESULT err=49 tag=97 nentries=0 wtime=0.000209991 optime=0.004933122 etime=0.005142007 - No such entry INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:249 Bind case 2-2. the bind user's suffix does not exist, bind should fail with error INVALID_CREDENTIALS INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:251 Bind as {uid=bogus,ou=people,dc=bogus,bogus} who does not exist. INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:260 Cause found - [28/Oct/2020:20:21:45.574553105 -0400] conn=1 op=12 RESULT err=49 tag=97 nentries=0 wtime=0.000161594 optime=0.013979587 etime=0.014134008 - No suffix for bind dn found INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:263 Bind case 2-3. the bind user's password is wrong, bind should fail with error INVALID_CREDENTIALS INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:265 Bind as {uid=buser123,ou=BOU,dc=example,dc=com,bogus} who does not exist. INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:269 Exception (expected): INVALID_CREDENTIALS INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:270 Desc Invalid credentials INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:278 Cause found - [28/Oct/2020:20:21:46.612567727 -0400] conn=1 op=13 RESULT err=49 tag=97 nentries=0 wtime=0.000184054 optime=0.034306213 etime=0.034482997 - Invalid credentials INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:281 Adding aci for uid=buser123,ou=BOU,dc=example,dc=com to ou=BOU,dc=example,dc=com. 
INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:283 aci: (targetattr="*")(version 3.0; acl "buser123"; allow(all) userdn = "ldap:///uid=buser123,ou=BOU,dc=example,dc=com";) INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:284 Bind as {cn=Directory Manager,password} INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:289 Bind case 3. the bind user has the right to read the entry itself, bind should be successful. INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:290 Bind as {uid=buser123,ou=BOU,dc=example,dc=com,buser123} which should be ok. INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:293 The following operations are against the subtree the bind user uid=buser123,ou=BOU,dc=example,dc=com has no rights. INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:297 Search case 1. the bind user has no rights to read the search entry, it should return no search results with <class 'ldap.SUCCESS'> INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Searching existing entry uid=tuser0,ou=people,dc=example,dc=com, which should be ok. INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:141 Search should return none INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:303 Search case 2-1. the search entry does not exist, the search should return no search results with SUCCESS INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Searching non-existing entry uid=bogus,dc=example,dc=com, which should be ok. INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:141 Search should return none INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:309 Search case 2-2. 
the search entry does not exist, the search should return no search results with SUCCESS INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Searching non-existing entry uid=bogus,ou=people,dc=example,dc=com, which should be ok. INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:141 Search should return none INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:316 Add case 1. the bind user has no rights AND the adding entry exists, it should fail with INSUFFICIENT_ACCESS INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Adding existing entry uid=tuser0,ou=people,dc=example,dc=com, which should fail with INSUFFICIENT_ACCESS. INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): INSUFFICIENT_ACCESS INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc Insufficient access INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:322 Add case 2-1. the bind user has no rights AND the adding entry does not exist, it should fail with INSUFFICIENT_ACCESS INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Adding non-existing entry uid=bogus,dc=example,dc=com, which should fail with INSUFFICIENT_ACCESS. INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): INSUFFICIENT_ACCESS INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc Insufficient access INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:328 Add case 2-2. 
the bind user has no rights AND the adding entry does not exist, it should fail with INSUFFICIENT_ACCESS INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Adding non-existing entry uid=bogus,ou=people,dc=example,dc=com, which should fail with INSUFFICIENT_ACCESS. INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): INSUFFICIENT_ACCESS INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc Insufficient access INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:335 Modify case 1. the bind user has no rights AND the modifying entry exists, it should fail with INSUFFICIENT_ACCESS INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Modifying existing entry uid=tuser0,ou=people,dc=example,dc=com, which should fail with INSUFFICIENT_ACCESS. INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): INSUFFICIENT_ACCESS INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc Insufficient access INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:341 Modify case 2-1. the bind user has no rights AND the modifying entry does not exist, it should fail with INSUFFICIENT_ACCESS INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Modifying non-existing entry uid=bogus,dc=example,dc=com, which should fail with INSUFFICIENT_ACCESS. 
INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): INSUFFICIENT_ACCESS INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc Insufficient access INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:347 Modify case 2-2. the bind user has no rights AND the modifying entry does not exist, it should fail with INSUFFICIENT_ACCESS INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Modifying non-existing entry uid=bogus,ou=people,dc=example,dc=com, which should fail with INSUFFICIENT_ACCESS. INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): INSUFFICIENT_ACCESS INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc Insufficient access INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:354 Modrdn case 1. the bind user has no rights AND the renaming entry exists, it should fail with INSUFFICIENT_ACCESS INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Renaming existing entry uid=tuser0,ou=people,dc=example,dc=com, which should fail with INSUFFICIENT_ACCESS. INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): INSUFFICIENT_ACCESS INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc Insufficient access INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:360 Modrdn case 2-1. 
the bind user has no rights AND the renaming entry does not exist, it should fail with INSUFFICIENT_ACCESS INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Renaming non-existing entry uid=bogus,dc=example,dc=com, which should fail with INSUFFICIENT_ACCESS. INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): INSUFFICIENT_ACCESS INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc Insufficient access INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:366 Modrdn case 2-2. the bind user has no rights AND the renaming entry does not exist, it should fail with INSUFFICIENT_ACCESS INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Renaming non-existing entry uid=bogus,ou=people,dc=example,dc=com, which should fail with INSUFFICIENT_ACCESS. INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): INSUFFICIENT_ACCESS INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc Insufficient access INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:372 Modrdn case 3. the bind user has no rights AND the node moving an entry to exists, it should fail with INSUFFICIENT_ACCESS INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Moving to existing superior ou=groups,dc=example,dc=com, which should fail with INSUFFICIENT_ACCESS. 
INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): INSUFFICIENT_ACCESS INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc Insufficient access INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:378 Modrdn case 4-1. the bind user has no rights AND the node moving an entry to does not, it should fail with INSUFFICIENT_ACCESS INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Moving to non-existing superior ou=OU,dc=example,dc=com, which should fail with INSUFFICIENT_ACCESS. INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): INSUFFICIENT_ACCESS INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc Insufficient access INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:384 Modrdn case 4-2. the bind user has no rights AND the node moving an entry to does not, it should fail with INSUFFICIENT_ACCESS INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Moving to non-existing superior ou=OU,dc=example,dc=com, which should fail with INSUFFICIENT_ACCESS. INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): INSUFFICIENT_ACCESS INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc Insufficient access INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:391 Delete case 1. 
the bind user has no rights AND the deleting entry exists, it should fail with INSUFFICIENT_ACCESS INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Deleting existing entry uid=tuser0,ou=people,dc=example,dc=com, which should fail with INSUFFICIENT_ACCESS. INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): INSUFFICIENT_ACCESS INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc Insufficient access INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:397 Delete case 2-1. the bind user has no rights AND the deleting entry does not exist, it should fail with INSUFFICIENT_ACCESS INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Deleting non-existing entry uid=bogus,dc=example,dc=com, which should fail with INSUFFICIENT_ACCESS. INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): INSUFFICIENT_ACCESS INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc Insufficient access INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:403 Delete case 2-2. the bind user has no rights AND the deleting entry does not exist, it should fail with INSUFFICIENT_ACCESS INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Deleting non-existing entry uid=bogus,ou=people,dc=example,dc=com, which should fail with INSUFFICIENT_ACCESS. 
INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): INSUFFICIENT_ACCESS INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc Insufficient access INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:407 EXTRA: Check no regressions INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:408 Adding aci for uid=buser123,ou=BOU,dc=example,dc=com to dc=example,dc=com. INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:410 Bind as {cn=Directory Manager,password} INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:415 Bind as {uid=buser123,ou=BOU,dc=example,dc=com,buser123}. INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:425 Search case. the search entry does not exist, the search should fail with NO_SUCH_OBJECT INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Searching non-existing entry uid=bogus,ou=people,dc=example,dc=com, which should fail with NO_SUCH_OBJECT. INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): NO_SUCH_OBJECT INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc No such object INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:431 Add case. the adding entry already exists, it should fail with ALREADY_EXISTS INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Adding existing entry uid=tuser0,ou=people,dc=example,dc=com, which should fail with ALREADY_EXISTS. 
INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): ALREADY_EXISTS INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc Already exists INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:436 Modify case. the modifying entry does not exist, it should fail with NO_SUCH_OBJECT INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Modifying non-existing entry uid=bogus,dc=example,dc=com, which should fail with NO_SUCH_OBJECT. INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): NO_SUCH_OBJECT INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc No such object INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:441 Modrdn case 1. the renaming entry does not exist, it should fail with NO_SUCH_OBJECT INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Renaming non-existing entry uid=bogus,dc=example,dc=com, which should fail with NO_SUCH_OBJECT. INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): NO_SUCH_OBJECT INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc No such object INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:446 Modrdn case 2. the node moving an entry to does not, it should fail with NO_SUCH_OBJECT INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Moving to non-existing superior ou=OU,dc=example,dc=com, which should fail with NO_SUCH_OBJECT. 
INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): NO_SUCH_OBJECT INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc No such object INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:451 Delete case. the deleting entry does not exist, it should fail with NO_SUCH_OBJECT INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Deleting non-existing entry uid=bogus,dc=example,dc=com, which should fail with NO_SUCH_OBJECT. INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): NO_SUCH_OBJECT INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc No such object INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:454 Inactivate uid=buser123,ou=BOU,dc=example,dc=com INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:461 ['/usr/sbin/dsidm', 'standalone1', '-b', 'dc=example,dc=com', 'account', 'lock', 'uid=buser123,ou=BOU,dc=example,dc=com'] INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:465 Bind as {uid=buser123,ou=BOU,dc=example,dc=com,buser123} which should fail with UNWILLING_TO_PERFORM. INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:469 Exception (expected): UNWILLING_TO_PERFORM INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:470 Desc Server is unwilling to perform INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:473 Bind as {uid=buser123,ou=BOU,dc=example,dc=com,bogus} which should fail with UNWILLING_TO_PERFORM. 
INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:477 Exception (expected): UNWILLING_TO_PERFORM INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:478 Desc Server is unwilling to perform INFO  tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:481 SUCCESS
Passed suites/acl/roledn_test.py::test_mod_seealso_positive[(STEVE_ROLE, NESTED_ROLE_TESTER)] 0.05
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/acl/roledn_test.py::test_mod_seealso_positive[(HARRY_ROLE, NESTED_ROLE_TESTER)] 0.04
No log output captured.
Passed suites/acl/roledn_test.py::test_mod_seealso_positive[(MARY_ROLE, NOT_RULE_ACCESS)] 0.04
No log output captured.
Passed suites/acl/roledn_test.py::test_mod_seealso_positive[(STEVE_ROLE, OR_RULE_ACCESS)] 0.04
No log output captured.
Passed suites/acl/roledn_test.py::test_mod_seealso_positive[(HARRY_ROLE, OR_RULE_ACCESS)] 0.04
No log output captured.
Passed suites/acl/roledn_test.py::test_mod_seealso_positive[(STEVE_ROLE, ALL_ACCESS)] 0.04
No log output captured.
Passed suites/acl/roledn_test.py::test_mod_seealso_positive[(HARRY_ROLE, ALL_ACCESS)] 0.04
No log output captured.
Passed suites/acl/roledn_test.py::test_mod_seealso_positive[(MARY_ROLE, ALL_ACCESS)] 0.04
No log output captured.
Passed suites/acl/roledn_test.py::test_mod_seealso_negative[(MARY_ROLE, NESTED_ROLE_TESTER)] 0.03
No log output captured.
Passed suites/acl/roledn_test.py::test_mod_seealso_negative[(STEVE_ROLE, NOT_RULE_ACCESS)] 0.05
No log output captured.
Passed suites/acl/roledn_test.py::test_mod_seealso_negative[(HARRY_ROLE, NOT_RULE_ACCESS)] 0.05
No log output captured.
Passed suites/acl/roledn_test.py::test_mod_seealso_negative[(MARY_ROLE , OR_RULE_ACCESS)] 0.05
No log output captured.
Passed suites/acl/roledn_test.py::test_mod_anonseealso_positive[NOT_RULE_ACCESS] 0.01
No log output captured.
Passed suites/acl/roledn_test.py::test_mod_anonseealso_positive[ALL_ACCESS] 0.01
No log output captured.
Passed suites/acl/roledn_test.py::test_mod_anonseealso_negaive[NESTED_ROLE_TESTER] 0.02
No log output captured.
Passed suites/acl/roledn_test.py::test_mod_anonseealso_negaive[OR_RULE_ACCESS] 0.02
No log output captured.
Passed suites/acl/search_real_part2_test.py::test_deny_all_access_with__target_set_on_non_leaf 0.41
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/acl/search_real_part2_test.py::test_deny_all_access_with__target_set_on_wildcard_non_leaf 0.45
No log output captured.
Passed suites/acl/search_real_part2_test.py::test_deny_all_access_with__target_set_on_wildcard_leaf 0.45
No log output captured.
Passed suites/acl/search_real_part2_test.py::test_deny_all_access_with_targetfilter_using_equality_search 0.21
No log output captured.
Passed suites/acl/search_real_part2_test.py::test_deny_all_access_with_targetfilter_using_equality_search_two 0.72
No log output captured.
Passed suites/acl/search_real_part2_test.py::test_deny_all_access_with_targetfilter_using_substring_search 0.19
No log output captured.
Passed suites/acl/search_real_part2_test.py::test_deny_all_access_with_targetfilter_using_substring_search_two 1.26
No log output captured.
Passed suites/acl/search_real_part2_test.py::test_deny_all_access_with_targetfilter_using_boolean_or_of_two_equality_search 0.15
No log output captured.
Passed suites/acl/search_real_part2_test.py::test_deny_all_access_to__userdn_two 0.47
No log output captured.
Passed suites/acl/search_real_part2_test.py::test_deny_all_access_with_userdn 0.51
No log output captured.
Passed suites/acl/search_real_part2_test.py::test_deny_all_access_with_targetfilter_using_presence_search 0.13
No log output captured.
Passed suites/acl/search_real_part3_test.py::test_deny_search_access_to_userdn_with_ldap_url 0.39
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/acl/search_real_part3_test.py::test_deny_search_access_to_userdn_with_ldap_url_two 0.61
No log output captured.
Passed suites/acl/search_real_part3_test.py::test_deny_search_access_to_userdn_with_ldap_url_matching_all_users 0.57
No log output captured.
Passed suites/acl/search_real_part3_test.py::test_deny_read_access_to_a_dynamic_group 0.47
No log output captured.
Passed suites/acl/search_real_part3_test.py::test_deny_read_access_to_dynamic_group_with_host_port_set_on_ldap_url 0.65
No log output captured.
Passed suites/acl/search_real_part3_test.py::test_deny_read_access_to_dynamic_group_with_scope_set_to_one_in_ldap_url 0.44
No log output captured.
Passed suites/acl/search_real_part3_test.py::test_deny_read_access_to_dynamic_group_two 0.65
No log output captured.
Passed suites/acl/search_real_part3_test.py::test_deny_access_to_group_should_deny_access_to_all_uniquemember 0.59
No log output captured.
Passed suites/acl/search_real_part3_test.py::test_entry_with_lots_100_attributes 9.70
No log output captured.
Passed suites/acl/search_real_part3_test.py::test_groupdnattr_value_is_another_group 0.13
No log output captured.
Passed suites/acl/search_real_test.py::test_deny_all_access_with_target_set 0.17
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/acl/search_real_test.py::test_deny_all_access_to_a_target_with_wild_card 0.30
No log output captured.
Passed suites/acl/search_real_test.py::test_deny_all_access_without_a_target_set 1.30
No log output captured.
Passed suites/acl/search_real_test.py::test_deny_read_search_and_compare_access_with_target_and_targetattr_set 0.92
No log output captured.
Passed suites/acl/search_real_test.py::test_deny_read_access_to_multiple_groupdns 0.89
No log output captured.
Passed suites/acl/search_real_test.py::test_deny_all_access_to_userdnattr 0.30
No log output captured.
Passed suites/acl/search_real_test.py::test_deny_all_access_with__target_set 0.50
No log output captured.
Passed suites/acl/search_real_test.py::test_deny_all_access_with__targetattr_set 1.26
No log output captured.
Passed suites/acl/search_real_test.py::test_deny_all_access_with_targetattr_set 0.81
No log output captured.
Passed suites/acl/selfdn_permissions_test.py::test_selfdn_permission_add 0.51
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389:selfdn_permissions_test.py:58 Add OCticket47653 that allows 'member' attribute INFO  lib389:selfdn_permissions_test.py:63 Add cn=bind_entry, dc=example,dc=com
-------------------------------Captured log call--------------------------------
INFO  lib389:selfdn_permissions_test.py:106 ######################### ADD ###################### INFO  lib389:selfdn_permissions_test.py:109 Bind as cn=bind_entry, dc=example,dc=com INFO  lib389:selfdn_permissions_test.py:139 Try to add Add cn=test_entry, dc=example,dc=com (aci is missing): dn: cn=test_entry, dc=example,dc=com cn: test_entry member: cn=bind_entry, dc=example,dc=com objectclass: top objectclass: person objectclass: OCticket47653 postalAddress: here postalCode: 1234 sn: test_entry INFO  lib389:selfdn_permissions_test.py:143 Exception (expected): INSUFFICIENT_ACCESS INFO  lib389:selfdn_permissions_test.py:147 Bind as cn=Directory Manager and add the ADD SELFDN aci INFO  lib389:selfdn_permissions_test.py:159 Bind as cn=bind_entry, dc=example,dc=com INFO  lib389:selfdn_permissions_test.py:164 Try to add Add cn=test_entry, dc=example,dc=com (member is missing) INFO  lib389:selfdn_permissions_test.py:172 Exception (expected): INSUFFICIENT_ACCESS INFO  lib389:selfdn_permissions_test.py:178 Try to add Add cn=test_entry, dc=example,dc=com (with several member values) INFO  lib389:selfdn_permissions_test.py:181 Exception (expected): INSUFFICIENT_ACCESS INFO  lib389:selfdn_permissions_test.py:184 Try to add Add cn=test_entry, dc=example,dc=com should be successful
Passed suites/acl/selfdn_permissions_test.py::test_selfdn_permission_search 0.34
-------------------------------Captured log call--------------------------------
INFO  lib389:selfdn_permissions_test.py:205 ######################### SEARCH ###################### INFO  lib389:selfdn_permissions_test.py:207 Bind as cn=bind_entry, dc=example,dc=com INFO  lib389:selfdn_permissions_test.py:211 Try to search cn=test_entry, dc=example,dc=com (aci is missing) INFO  lib389:selfdn_permissions_test.py:216 Bind as cn=Directory Manager and add the READ/SEARCH SELFDN aci INFO  lib389:selfdn_permissions_test.py:229 Bind as cn=bind_entry, dc=example,dc=com INFO  lib389:selfdn_permissions_test.py:233 Try to search cn=test_entry, dc=example,dc=com should be successful
Passed suites/acl/selfdn_permissions_test.py::test_selfdn_permission_modify 0.51
-------------------------------Captured log call--------------------------------
INFO  lib389:selfdn_permissions_test.py:256 Bind as cn=bind_entry, dc=example,dc=com INFO  lib389:selfdn_permissions_test.py:259 ######################### MODIFY ###################### INFO  lib389:selfdn_permissions_test.py:263 Try to modify cn=test_entry, dc=example,dc=com (aci is missing) INFO  lib389:selfdn_permissions_test.py:267 Exception (expected): INSUFFICIENT_ACCESS INFO  lib389:selfdn_permissions_test.py:271 Bind as cn=Directory Manager and add the WRITE SELFDN aci INFO  lib389:selfdn_permissions_test.py:284 Bind as cn=bind_entry, dc=example,dc=com INFO  lib389:selfdn_permissions_test.py:288 Try to modify cn=test_entry, dc=example,dc=com. It should succeeds
Passed suites/acl/selfdn_permissions_test.py::test_selfdn_permission_delete 0.24
-------------------------------Captured log call--------------------------------
INFO  lib389:selfdn_permissions_test.py:314 ######################### DELETE ###################### INFO  lib389:selfdn_permissions_test.py:317 Bind as cn=bind_entry, dc=example,dc=com INFO  lib389:selfdn_permissions_test.py:322 Try to delete cn=test_entry, dc=example,dc=com (aci is missing) INFO  lib389:selfdn_permissions_test.py:325 Exception (expected): INSUFFICIENT_ACCESS INFO  lib389:selfdn_permissions_test.py:329 Bind as cn=Directory Manager and add the READ/SEARCH SELFDN aci INFO  lib389:selfdn_permissions_test.py:341 Bind as cn=bind_entry, dc=example,dc=com INFO  lib389:selfdn_permissions_test.py:345 Try to delete cn=test_entry, dc=example,dc=com should be successful
Passed suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_1] 0.02
No log output captured.
Passed suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_2] 0.17
No log output captured.
Passed suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_3] 0.02
No log output captured.
Passed suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_4] 0.02
No log output captured.
Passed suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_5] 0.02
No log output captured.
Passed suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_6] 0.18
No log output captured.
Passed suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_7] 0.02
No log output captured.
Passed suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_8] 0.02
No log output captured.
Passed suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_9] 0.17
No log output captured.
Passed suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_10] 0.16
No log output captured.
Passed suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_11] 0.17
No log output captured.
Passed suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_12] 0.02
No log output captured.
Passed suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_13] 0.02
No log output captured.
Passed suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_14] 0.02
No log output captured.
Passed suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_15] 0.02
No log output captured.
Passed suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_16] 0.02
No log output captured.
Passed suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_17] 0.02
No log output captured.
Passed suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_19] 0.02
No log output captured.
Passed suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_21] 0.02
No log output captured.
Passed suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_22] 0.02
No log output captured.
Passed suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_23] 0.02
No log output captured.
Passed suites/acl/syntax_test.py::test_aci_invalid_syntax[test_Missing_acl_mispel] 0.02
No log output captured.
Passed suites/acl/syntax_test.py::test_aci_invalid_syntax[test_Missing_acl_string] 0.02
No log output captured.
Passed suites/acl/syntax_test.py::test_aci_invalid_syntax[test_Wrong_version_string] 0.02
No log output captured.
Passed suites/acl/syntax_test.py::test_aci_invalid_syntax[test_Missing_version_string] 0.02
No log output captured.
Passed suites/acl/syntax_test.py::test_aci_invalid_syntax[test_Authenticate_statement] 0.02
No log output captured.
Passed suites/acl/syntax_test.py::test_aci_invalid_syntax[test_Multiple_targets] 0.02
No log output captured.
Passed suites/acl/syntax_test.py::test_aci_invalid_syntax[test_Target_set_to_self] 0.02
No log output captured.
Passed suites/acl/syntax_test.py::test_aci_invalid_syntax[test_target_set_with_ldap_instead_of_ldap] 0.02
No log output captured.
Passed suites/acl/syntax_test.py::test_aci_invalid_syntax[test_target_set_with_more_than_three] 0.02
No log output captured.
Passed suites/acl/syntax_test.py::test_aci_invalid_syntax[test_target_set_with_less_than_three] 0.02
No log output captured.
Passed suites/acl/syntax_test.py::test_aci_invalid_syntax[test_bind_rule_set_with_less_than_three] 0.02
No log output captured.
Passed suites/acl/syntax_test.py::test_aci_invalid_syntax[test_Use_semicolon_instead_of_comma_in_permission] 0.02
No log output captured.
Passed suites/acl/syntax_test.py::test_aci_invalid_syntax[test_Use_double_equal_instead_of_equal_in_the_target] 0.02
No log output captured.
Passed suites/acl/syntax_test.py::test_aci_invalid_syntax[test_use_double_equal_instead_of_equal_in_user_and_group_access] 0.03
No log output captured.
Passed suites/acl/syntax_test.py::test_aci_invalid_syntax[test_donot_cote_the_name_of_the_aci] 0.02
No log output captured.
Passed suites/acl/syntax_test.py::test_aci_invalid_syntax[test_extra_parentheses_case_1] 0.02
No log output captured.
Passed suites/acl/syntax_test.py::test_aci_invalid_syntax[test_extra_parentheses_case_2] 0.02
No log output captured.
Passed suites/acl/syntax_test.py::test_aci_invalid_syntax[test_extra_parentheses_case_3] 0.02
No log output captured.
Passed suites/acl/syntax_test.py::test_aci_invalid_syntax[test_no_semicolon_at_the_end_of_the_aci] 0.02
No log output captured.
Passed suites/acl/syntax_test.py::test_aci_invalid_syntax[test_a_character_different_of_a_semicolon_at_the_end_of_the_aci] 0.02
No log output captured.
Passed suites/acl/syntax_test.py::test_aci_invalid_syntax[test_bad_filter] 0.02
No log output captured.
Passed suites/acl/syntax_test.py::test_aci_invalid_syntax[test_Use_double_equal_instead_of_equal_in_the_targattrfilters] 0.02
No log output captured.
Passed suites/acl/syntax_test.py::test_aci_invalid_syntax[test_Use_double_equal_instead_of_equal_inside_the_targattrfilters] 0.02
No log output captured.
Passed suites/acl/syntax_test.py::test_target_set_above_the_entry_test 0.02
No log output captured.
Passed suites/acl/userattr_test.py::test_mod_see_also_positive[(CAN,ROLEDNACCESS)] 0.04
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/acl/userattr_test.py::test_mod_see_also_positive[(CAN,USERDNACCESS)] 0.01
No log output captured.
Passed suites/acl/userattr_test.py::test_mod_see_also_positive[(CAN,GROUPDNACCESS)] 0.01
No log output captured.
Passed suites/acl/userattr_test.py::test_mod_see_also_positive[(CAN,LDAPURLACCESS)] 0.02
No log output captured.
Passed suites/acl/userattr_test.py::test_mod_see_also_positive[(CAN,ATTRNAMEACCESS)] 0.27
No log output captured.
Passed suites/acl/userattr_test.py::test_mod_see_also_positive[(LEVEL_0, OU_2)] 0.04
No log output captured.
Passed suites/acl/userattr_test.py::test_mod_see_also_positive[(LEVEL_1,ANCESTORS)] 0.04
No log output captured.
Passed suites/acl/userattr_test.py::test_mod_see_also_positive[(LEVEL_2,GRANDPARENTS)] 0.04
No log output captured.
Passed suites/acl/userattr_test.py::test_mod_see_also_positive[(LEVEL_4,OU_2)] 0.04
No log output captured.
Passed suites/acl/userattr_test.py::test_mod_see_also_positive[(LEVEL_4, ANCESTORS)] 0.01
No log output captured.
Passed suites/acl/userattr_test.py::test_mod_see_also_positive[(LEVEL_4,GRANDPARENTS)] 0.01
No log output captured.
Passed suites/acl/userattr_test.py::test_mod_see_also_positive[(LEVEL_4,PARENTS)] 0.02
No log output captured.
Passed suites/acl/userattr_test.py::test_mod_see_also_positive[(LEVEL_4,CHILDREN)] 0.01
No log output captured.
Passed suites/acl/userattr_test.py::test_mod_see_also_negative[(CANNOT,ROLEDNACCESS)] 0.05
No log output captured.
Passed suites/acl/userattr_test.py::test_mod_see_also_negative[(CANNOT,USERDNACCESS)] 0.03
No log output captured.
Passed suites/acl/userattr_test.py::test_mod_see_also_negative[(CANNOT,GROUPDNACCESS)] 0.04
No log output captured.
Passed suites/acl/userattr_test.py::test_mod_see_also_negative[(CANNOT,LDAPURLACCESS)] 0.03
No log output captured.
Passed suites/acl/userattr_test.py::test_mod_see_also_negative[(CANNOT,ATTRNAMEACCESS)] 0.03
No log output captured.
Passed suites/acl/userattr_test.py::test_mod_see_also_negative[(LEVEL_0, ANCESTORS)] 0.05
No log output captured.
Passed suites/acl/userattr_test.py::test_mod_see_also_negative[(LEVEL_0,GRANDPARENTS)] 0.03
No log output captured.
Passed suites/acl/userattr_test.py::test_mod_see_also_negative[(LEVEL_0,PARENTS)] 0.03
No log output captured.
Passed suites/acl/userattr_test.py::test_mod_see_also_negative[(LEVEL_0,CHILDREN)] 0.03
No log output captured.
Passed suites/acl/userattr_test.py::test_mod_see_also_negative[(LEVEL_2,PARENTS)] 0.05
No log output captured.
Passed suites/acl/userattr_test.py::test_mod_see_also_negative[(LEVEL_4,GRANDSONS)] 0.05
No log output captured.
Passed suites/acl/userattr_test.py::test_last_three[uid=Ananda Borah,ou=Accounting,dc=example,dc=com-uid=USERDNACCESS,ou=Accounting,dc=example,dc=com] 0.06
No log output captured.
Passed suites/acl/userattr_test.py::test_last_three[uid=Ananda Borah,ou=Accounting,dc=example,dc=com-uid=ROLEDNACCESS,ou=Accounting,dc=example,dc=com] 0.03
No log output captured.
Passed suites/acl/userattr_test.py::test_last_three[uid=Ananda Borah,ou=Accounting,dc=example,dc=com-uid=GROUPDNACCESS,ou=Accounting,dc=example,dc=com] 0.03
No log output captured.
Passed suites/acl/valueacl_part2_test.py::test_we_can_search_as_expected 0.26
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/acl/valueacl_part2_test.py::test_we_can_mod_title_as_expected 0.06
No log output captured.
Passed suites/acl/valueacl_part2_test.py::test_modify_with_multiple_filters 0.05
No log output captured.
Passed suites/acl/valueacl_part2_test.py::test_denied_by_multiple_filters 0.06
No log output captured.
Passed suites/acl/valueacl_part2_test.py::test_allowed_add_one_attribute 0.04
No log output captured.
Passed suites/acl/valueacl_part2_test.py::test_cannot_add_an_entry_with_attribute_values_we_are_not_allowed_add 0.06
No log output captured.
Passed suites/acl/valueacl_part2_test.py::test_on_modrdn 0.04
No log output captured.
Passed suites/acl/valueacl_part2_test.py::test_on_modrdn_allow 0.05
No log output captured.
Passed suites/acl/valueacl_part2_test.py::test_targattrfilters_keyword 0.09
No log output captured.
Passed suites/acl/valueacl_test.py::test_delete_an_attribute_value_we_are_not_allowed_to_delete 0.07
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/acl/valueacl_test.py::test_donot_allow_write_access_to_title_if_value_is_not_architect 0.06
No log output captured.
Passed suites/acl/valueacl_test.py::test_delete_an_attribute_value_we_are_allowed_to_delete 0.04
No log output captured.
Passed suites/acl/valueacl_test.py::test_delete_an_attribute_value_we_are_not_allowed_to_deleted 0.06
No log output captured.
Passed suites/acl/valueacl_test.py::test_allow_modify_replace 0.07
No log output captured.
Passed suites/acl/valueacl_test.py::test_allow_modify_delete 0.08
No log output captured.
Passed suites/acl/valueacl_test.py::test_replace_an_attribute_if_we_lack 0.07
No log output captured.
Passed suites/acl/valueacl_test.py::test_remove_an_attribute_if_we_have_del_rights_to_all_attr_value 0.05
No log output captured.
Passed suites/acl/valueacl_test.py::test_remove_an_attribute_if_we_donot_have_del_rights_to_all_attr_value 0.06
No log output captured.
Passed suites/acl/valueacl_test.py::test_remove_an_attribute_if_we_have_del_rights_to_all_attr_values 0.05
No log output captured.
Passed suites/acl/valueacl_test.py::test_cantnot_delete_an_entry_with_attribute_values_we_are_not_allowed_delete 0.06
No log output captured.
Passed suites/acl/valueacl_test.py::test_we_can_add_and_delete_an_entry_with_attribute_values_we_are_allowed_add_and_delete 0.06
No log output captured.
Passed suites/acl/valueacl_test.py::test_allow_title 0.06
No log output captured.
Passed suites/acl/valueacl_test.py::test_allow_to_modify 0.05
No log output captured.
Passed suites/acl/valueacl_test.py::test_selfwrite_does_not_confer_write_on_a_targattrfilters_atribute 0.06
No log output captured.
Passed suites/acl/valueacl_test.py::test_selfwrite_continues_to_give_rights_to_attr_in_targetattr_list 0.05
No log output captured.
Passed suites/acl/valueacl_test.py::test_add_an_attribute_value_we_are_allowed_to_add_with_ldapanyone 0.05
No log output captured.
Passed suites/acl/valueacl_test.py::test_hierarchy 0.06
No log output captured.
Passed suites/acl/valueacl_test.py::test_targattrfilters_and_search_permissions_and_that_ldapmodify_works_as_expected 0.04
No log output captured.
Passed suites/acl/valueacl_test.py::test_targattrfilters_and_search_permissions_and_that_ldapmodify_works_as_expected_two 0.11
No log output captured.
Passed suites/attr_encryption/attr_encryption_test.py::test_basic 5.79
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. INFO  tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:35 Enable TLS for attribute encryption INFO  tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:38 Enables attribute encryption INFO  tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:42 Enables attribute encryption for employeeNumber and telephoneNumber INFO  tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:46 Add a test user with encrypted attributes
-------------------------------Captured log call--------------------------------
INFO  tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:81 Restart the server INFO  tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:87 Extracting values of cn from the list of objects in encrypt_attrs INFO  tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:88 And appending the cn values in a list INFO  tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:93 Check employeenumber encryption is enabled INFO  tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:96 Check telephoneNumber encryption is enabled INFO  tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:99 Check that encrypted attribute is present for user i.e. telephoneNumber
Passed suites/attr_encryption/attr_encryption_test.py::test_export_import_ciphertext 13.14
------------------------------Captured stderr call------------------------------
ldiffile: /var/lib/dirsrv/slapd-standalone1/ldif/export_ciphertext.ldif
-------------------------------Captured log call--------------------------------
INFO  tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:127 Export data as ciphertext INFO  tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:138 Check that the attribute is present in the exported file INFO  tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:139 Check that the encrypted value of attribute is not present in the exported file INFO  tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:145 Delete the test user entry with encrypted data INFO  tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:148 Import data as ciphertext, which was exported previously INFO  tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:159 Check that the data with encrypted attribute is imported properly
Passed suites/attr_encryption/attr_encryption_test.py::test_export_import_plaintext 14.61
------------------------------Captured stderr call------------------------------
[28/Oct/2020:20:25:40.300139136 -0400] - WARN - Security Initialization - /tmp is not a private namespace. pem files not exported there [28/Oct/2020:20:25:40.306222213 -0400] - INFO - slapd_extract_cert - CA CERT NAME: Self-Signed-CA [28/Oct/2020:20:25:40.308696774 -0400] - ERR - slapd_extract_cert - Unable to open "/tmp/slapd-standalone1/Self-Signed-CA.pem" for writing (-5950, 2). [28/Oct/2020:20:25:40.312521635 -0400] - WARN - Security Initialization - SSL alert: Sending pin request to SVRCore. You may need to run systemd-tty-ask-password-agent to provide the password. [28/Oct/2020:20:25:40.509054764 -0400] - INFO - slapd_extract_cert - SERVER CERT NAME: Server-Cert [28/Oct/2020:20:25:40.512102317 -0400] - WARN - Security Initialization - /tmp is not a private namespace. pem files not exported there [28/Oct/2020:20:25:40.515092707 -0400] - WARN - Security Initialization - /tmp is not a private namespace. pem files not exported there ldiffile: /var/lib/dirsrv/slapd-standalone1/ldif/export_plaintext.ldif [28/Oct/2020:20:25:47.088314244 -0400] - WARN - Security Initialization - /tmp is not a private namespace. pem files not exported there [28/Oct/2020:20:25:47.094671929 -0400] - INFO - slapd_extract_cert - CA CERT NAME: Self-Signed-CA [28/Oct/2020:20:25:47.097532046 -0400] - ERR - slapd_extract_cert - Unable to open "/tmp/slapd-standalone1/Self-Signed-CA.pem" for writing (-5950, 2). [28/Oct/2020:20:25:47.101793165 -0400] - WARN - Security Initialization - SSL alert: Sending pin request to SVRCore. You may need to run systemd-tty-ask-password-agent to provide the password. [28/Oct/2020:20:25:47.304428702 -0400] - INFO - slapd_extract_cert - SERVER CERT NAME: Server-Cert [28/Oct/2020:20:25:47.307417460 -0400] - WARN - Security Initialization - /tmp is not a private namespace. pem files not exported there [28/Oct/2020:20:25:47.310462055 -0400] - WARN - Security Initialization - /tmp is not a private namespace. pem files not exported there
-------------------------------Captured log call--------------------------------
INFO  tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:189 Export data as plain text INFO  tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:200 Check that the attribute is present in the exported file INFO  tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:201 Check that the plain text value of the encrypted attribute is present in the exported file INFO  tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:205 Delete the test user entry with encrypted data INFO  tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:208 Import data as plain text, which was exported previously INFO  tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:219 Check that the attribute is imported properly
Passed suites/attr_encryption/attr_encryption_test.py::test_attr_encryption_unindexed 5.67
------------------------------Captured stderr call------------------------------
ldiffile: /var/lib/dirsrv/slapd-standalone1/ldif/emp_num_ciphertext.ldif
-------------------------------Captured log call--------------------------------
INFO  tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:242 Export data as cipher text INFO  tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:253 Check that the attribute is present in the exported file INFO  tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:254 Check that the encrypted value of attribute is not present in the exported file
Passed suites/attr_encryption/attr_encryption_test.py::test_attr_encryption_multiple_backends 8.87
------------------------------Captured stderr call------------------------------
ldiffile: /var/lib/dirsrv/slapd-standalone1/ldif/export_db1.ldif ldiffile: /var/lib/dirsrv/slapd-standalone1/ldif/export_db2.ldif
-------------------------------Captured log call--------------------------------
INFO  tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:287 Add two test backends INFO  tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:307 Enables attribute encryption for telephoneNumber in test_backend1 INFO  tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:312 Enables attribute encryption for employeeNumber in test_backend2 INFO  tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:317 Add a test user with encrypted attributes in both backends INFO  tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:326 Export data as ciphertext from both backends INFO  tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:343 Check that the attribute is present in the exported file in db1 INFO  tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:344 Check that the encrypted value of attribute is not present in the exported file in db1 INFO  tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:350 Check that the attribute is present in the exported file in db2 INFO  tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:351 Check that the encrypted value of attribute is not present in the exported file in db2 INFO  tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:357 Delete test backends
Passed suites/attr_encryption/attr_encryption_test.py::test_attr_encryption_backends 8.96
------------------------------Captured stderr call------------------------------
ldiffile: /var/lib/dirsrv/slapd-standalone1/ldif/export_db1.ldif ldiffile: /var/lib/dirsrv/slapd-standalone1/ldif/export_db2.ldif
-------------------------------Captured log call--------------------------------
INFO  tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:386 Add two test backends INFO  tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:405 Enables attribute encryption for telephoneNumber in test_backend1 INFO  tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:410 Add a test user with telephoneNumber in both backends INFO  tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:419 Export data as ciphertext from both backends INFO  tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:436 Check that the attribute is present in the exported file in db1 INFO  tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:437 Check that the encrypted value of attribute is not present in the exported file in db1 INFO  tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:443 Check that the attribute is present in the exported file in db2 INFO  tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:444 Check that the value of attribute is also present in the exported file in db2 INFO  tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:450 Delete test backends
Passed suites/automember_plugin/automember_mod_test.py::test_mods 11.90
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.suites.automember_plugin.automember_mod_test:automember_mod_test.py:135 Test PASSED
Passed suites/automember_plugin/automember_test.py::test_automemberscope 0.00
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/automember_plugin/automember_test.py::test_automemberfilter 0.26
No log output captured.
Passed suites/automember_plugin/automember_test.py::test_adduser 0.05
No log output captured.
Passed suites/automember_plugin/automember_test.py::test_delete_default_group 4.50
No log output captured.
Passed suites/automember_plugin/automember_test.py::test_no_default_group 4.07
No log output captured.
Passed suites/automember_plugin/automember_test.py::test_delete_target_group 5.10
No log output captured.
Passed suites/automember_plugin/basic_test.py::test_disable_the_plug_in 0.02
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology.
Passed suites/automember_plugin/basic_test.py::test_custom_config_area 0.27
No log output captured.
Passed suites/automember_plugin/basic_test.py::test_ability_to_control_behavior_of_modifiers_name 9.29
No log output captured.
Passed suites/automember_plugin/basic_test.py::test_posixaccount_objectclass_automemberdefaultgroup 0.06
No log output captured.
Passed suites/automember_plugin/basic_test.py::test_duplicated_member_attributes_added_when_the_entry_is_re_created 0.13
No log output captured.
Passed suites/automember_plugin/basic_test.py::test_multi_valued_automemberdefaultgroup_for_hostgroups 0.06
No log output captured.
Passed suites/automember_plugin/basic_test.py::test_plugin_creates_member_attributes_of_the_automemberdefaultgroup 0.06
No log output captured.
Passed suites/automember_plugin/basic_test.py::test_multi_valued_automemberdefaultgroup_with_uniquemember 8.54
No log output captured.
Passed suites/automember_plugin/basic_test.py::test_invalid_automembergroupingattr_member 0.29
No log output captured.
Passed suites/automember_plugin/basic_test.py::test_valid_and_invalid_automembergroupingattr 0.11
No log output captured.
Passed suites/automember_plugin/basic_test.py::test_add_regular_expressions_for_user_groups_and_check_for_member_attribute_after_adding_users 0.12
No log output captured.
Passed suites/automember_plugin/basic_test.py::test_matching_gid_role_inclusive_regular_expression[autoMembers_22-5288-5289-Contractor-5291-5292-Contractors] 0.05
No log output captured.
Passed suites/automember_plugin/basic_test.py::test_matching_gid_role_inclusive_regular_expression[autoMembers_21-1161-1162-Contractor-1162-1163-Contractors] 0.05
No log output captured.
Passed suites/automember_plugin/basic_test.py::test_matching_gid_role_inclusive_regular_expression[autoMembers_20-1188-1189-CEO-1191-1192-Contractors] 0.07
No log output captured.
Passed suites/automember_plugin/basic_test.py::test_matching_gid_role_inclusive_regular_expression[autoMembers_15-9288-9289-Manager-9291-9292-Managers] 0.05
No log output captured.
Passed suites/automember_plugin/basic_test.py::test_matching_gid_role_inclusive_regular_expression[autoMembers_14-561-562-Manager-562-563-Managers] 0.05
No log output captured.
Passed suites/automember_plugin/basic_test.py::test_matching_gid_role_inclusive_regular_expression[autoMembers_13-9788-9789-VPEngg-9392-9393-Managers] 0.06
No log output captured.
Passed suites/automember_plugin/basic_test.py::test_gid_and_role_inclusive_exclusive_regular_expression[autoMembers_26-5788-5789-Intern-Contractors-SuffDef1-5] 0.07
No log output captured.
Passed suites/automember_plugin/basic_test.py::test_gid_and_role_inclusive_exclusive_regular_expression[autoMembers_25-9788-9789-Employee-Contractors-Managers-1] 0.04
No log output captured.
Passed suites/automember_plugin/basic_test.py::test_gid_and_role_inclusive_exclusive_regular_expression[autoMembers_24-1110-1111-Employee-Contractors-SuffDef1-5] 0.06
No log output captured.
Passed suites/automember_plugin/basic_test.py::test_gid_and_role_inclusive_exclusive_regular_expression[autoMembers_23-2788-2789-Contractor-Contractors-SuffDef1-5] 0.06
No log output captured.
Passed suites/automember_plugin/basic_test.py::test_gid_and_role_inclusive_exclusive_regular_expression[autoMembers_19-5788-5789-HRManager-Managers-SuffDef1-5] 0.08
No log output captured.
Passed suites/automember_plugin/basic_test.py::test_gid_and_role_inclusive_exclusive_regular_expression[autoMembers_18-6788-6789-Junior-Managers-SuffDef1-5] 0.08
No log output captured.
Passed suites/automember_plugin/basic_test.py::test_gid_and_role_inclusive_exclusive_regular_expression[autoMembers_17-562-563-Junior-Managers-SuffDef1-5] 0.05
No log output captured.
Passed suites/automember_plugin/basic_test.py::test_gid_and_role_inclusive_exclusive_regular_expression[autoMembers_16-6788-6789-Manager-Managers-SuffDef1-5] 0.06
No log output captured.
Passed suites/automember_plugin/basic_test.py::test_managers_contractors_exclusive_regex_rules_member_uid[autoMembers_32-555-720-Employee-SubDef1-SubDef3] 0.07
No log output captured.
Passed suites/automember_plugin/basic_test.py::test_managers_contractors_exclusive_regex_rules_member_uid[autoMembers_31-515-200-Junior-SubDef1-SubDef5] 0.10
No log output captured.
Passed suites/automember_plugin/basic_test.py::test_managers_contractors_exclusive_regex_rules_member_uid[autoMembers_30-999-400-Supervisor-SubDef1-SubDef2] 0.07
No log output captured.
Passed suites/automember_plugin/basic_test.py::test_managers_contractors_exclusive_regex_rules_member_uid[autoMembers_28-555-3663-ContractHR-Contractors,cn=subsuffGroups-Managers,cn=subsuffGroups] 0.05
No log output captured.
Passed suites/automember_plugin/basic_test.py::test_managers_inclusive_regex_rule[autoMembers_27-595-690-ContractHR-Managers-Contractors] 0.06
No log output captured.
Passed suites/automember_plugin/basic_test.py::test_managers_inclusive_regex_rule[autoMembers_29-8195-2753-Employee-Contractors-Managers] 0.05
No log output captured.
Passed suites/automember_plugin/basic_test.py::test_managers_inclusive_regex_rule[autoMembers_33-545-3333-Supervisor-Contractors-Managers] 0.05
No log output captured.
Passed suites/automember_plugin/basic_test.py::test_managers_inclusive_regex_rule[autoMembers_34-8195-693-Temporary-Managers-Contractors] 0.07
No log output captured.
Passed suites/automember_plugin/basic_test.py::test_reject_invalid_config_and_we_donot_deadlock_the_server 8.21
No log output captured.
Passed suites/automember_plugin/basic_test.py::test_automemtask_re_build_task 10.61
No log output captured.
Passed suites/automember_plugin/basic_test.py::test_automemtask_export_task 11.08
-------------------------------Captured log call--------------------------------
INFO  lib389:tasks.py:1039 Automember Export Updates task (task-10282020_202825) completed successfully
Passed suites/automember_plugin/basic_test.py::test_automemtask_mapping 2.13
-------------------------------Captured log call--------------------------------
INFO  lib389:tasks.py:1087 Automember Map Updates task (task-10282020_202827) completed successfully
Passed suites/automember_plugin/basic_test.py::test_automemtask_re_build 8.24
No log output captured.
Passed suites/automember_plugin/basic_test.py::test_automemtask_export 13.29
-------------------------------Captured log call--------------------------------
INFO  lib389:tasks.py:1039 Automember Export Updates task (task-10282020_202846) completed successfully
Passed suites/automember_plugin/basic_test.py::test_automemtask_run_re_build 17.44
No log output captured.
Passed suites/automember_plugin/basic_test.py::test_automemtask_run_export 14.56
-------------------------------Captured log call--------------------------------
INFO  lib389:tasks.py:1039 Automember Export Updates task (task-10282020_202922) completed successfully
Passed suites/automember_plugin/configuration_test.py::test_configuration 4.53
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/backups/backup_test.py::test_missing_backend 5.67
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/basic/basic_test.py::test_basic_ops 0.14
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/basic/basic_test.py::test_basic_import_export 48.61
------------------------------Captured stderr call------------------------------
ldiffile: /var/lib/dirsrv/slapd-standalone1/ldif/export.ldif
Passed suites/basic/basic_test.py::test_basic_backup 9.56
-------------------------------Captured log call--------------------------------
INFO  lib389:tasks.py:619 Backup task backup_10282020_203057 completed successfully INFO  lib389:tasks.py:673 Restore task restore_10282020_203100 completed successfully
Passed suites/basic/basic_test.py::test_basic_db2index 5.62
------------------------------Captured stderr call------------------------------
[28/Oct/2020:20:31:10.012438819 -0400] - INFO - ldbm_instance_config_cachememsize_set - force a minimal value 512000 [28/Oct/2020:20:31:10.020083296 -0400] - INFO - check_and_set_import_cache - pagesize: 4096, available bytes 7578624000, process usage 22753280 [28/Oct/2020:20:31:10.025022821 -0400] - INFO - check_and_set_import_cache - Import allocates 2960400KB import cache. [28/Oct/2020:20:31:10.029322008 -0400] - INFO - bdb_copy_directory - Backing up file 0 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T20:31:09.886404/userRoot/givenName.db) [28/Oct/2020:20:31:10.033111228 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/givenName.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T20:31:09.886404/userRoot/givenName.db [28/Oct/2020:20:31:10.038707068 -0400] - INFO - bdb_copy_directory - Backing up file 1 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T20:31:09.886404/userRoot/aci.db) [28/Oct/2020:20:31:10.042106553 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/aci.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T20:31:09.886404/userRoot/aci.db [28/Oct/2020:20:31:10.048400743 -0400] - INFO - bdb_copy_directory - Backing up file 2 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T20:31:09.886404/userRoot/sn.db) [28/Oct/2020:20:31:10.052073393 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/sn.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T20:31:09.886404/userRoot/sn.db [28/Oct/2020:20:31:10.055810614 -0400] - INFO - bdb_copy_directory - Backing up file 3 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T20:31:09.886404/userRoot/numsubordinates.db) [28/Oct/2020:20:31:10.060375690 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/numsubordinates.db to 
/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T20:31:09.886404/userRoot/numsubordinates.db [28/Oct/2020:20:31:10.064100793 -0400] - INFO - bdb_copy_directory - Backing up file 4 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T20:31:09.886404/userRoot/entryusn.db) [28/Oct/2020:20:31:10.067542244 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/entryusn.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T20:31:09.886404/userRoot/entryusn.db [28/Oct/2020:20:31:10.071136118 -0400] - INFO - bdb_copy_directory - Backing up file 5 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T20:31:09.886404/userRoot/nsuniqueid.db) [28/Oct/2020:20:31:10.074360782 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/nsuniqueid.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T20:31:09.886404/userRoot/nsuniqueid.db [28/Oct/2020:20:31:10.077525292 -0400] - INFO - bdb_copy_directory - Backing up file 6 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T20:31:09.886404/userRoot/ancestorid.db) [28/Oct/2020:20:31:10.083748565 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/ancestorid.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T20:31:09.886404/userRoot/ancestorid.db [28/Oct/2020:20:31:10.088825220 -0400] - INFO - bdb_copy_directory - Backing up file 7 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T20:31:09.886404/userRoot/parentid.db) [28/Oct/2020:20:31:10.092581020 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/parentid.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T20:31:09.886404/userRoot/parentid.db [28/Oct/2020:20:31:10.096480041 -0400] - INFO - bdb_copy_directory - Backing up file 8 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T20:31:09.886404/userRoot/mail.db) [28/Oct/2020:20:31:10.100139015 -0400] - INFO - 
dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/mail.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T20:31:09.886404/userRoot/mail.db [28/Oct/2020:20:31:10.104612092 -0400] - INFO - bdb_copy_directory - Backing up file 9 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T20:31:09.886404/userRoot/telephoneNumber.db) [28/Oct/2020:20:31:10.107752200 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/telephoneNumber.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T20:31:09.886404/userRoot/telephoneNumber.db [28/Oct/2020:20:31:10.111657008 -0400] - INFO - bdb_copy_directory - Backing up file 10 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T20:31:09.886404/userRoot/cn.db) [28/Oct/2020:20:31:10.114995207 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/cn.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T20:31:09.886404/userRoot/cn.db [28/Oct/2020:20:31:10.117991040 -0400] - INFO - bdb_copy_directory - Backing up file 11 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T20:31:09.886404/userRoot/DBVERSION) [28/Oct/2020:20:31:10.121081012 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/DBVERSION to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T20:31:09.886404/userRoot/DBVERSION [28/Oct/2020:20:31:10.125417735 -0400] - INFO - bdb_copy_directory - Backing up file 12 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T20:31:09.886404/userRoot/uid.db) [28/Oct/2020:20:31:10.128490145 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/uid.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T20:31:09.886404/userRoot/uid.db [28/Oct/2020:20:31:10.131520693 -0400] - INFO - bdb_copy_directory - Backing up file 13 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T20:31:09.886404/userRoot/entryrdn.db) 
[28/Oct/2020:20:31:10.134189902 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/entryrdn.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T20:31:09.886404/userRoot/entryrdn.db [28/Oct/2020:20:31:10.137401046 -0400] - INFO - bdb_copy_directory - Backing up file 14 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T20:31:09.886404/userRoot/uniquemember.db) [28/Oct/2020:20:31:10.140310642 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/uniquemember.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T20:31:09.886404/userRoot/uniquemember.db [28/Oct/2020:20:31:10.143476915 -0400] - INFO - bdb_copy_directory - Backing up file 15 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T20:31:09.886404/userRoot/id2entry.db) [28/Oct/2020:20:31:10.146743371 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/id2entry.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T20:31:09.886404/userRoot/id2entry.db [28/Oct/2020:20:31:10.150585615 -0400] - INFO - bdb_copy_directory - Backing up file 16 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T20:31:09.886404/userRoot/objectclass.db) [28/Oct/2020:20:31:10.153557683 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/objectclass.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T20:31:09.886404/userRoot/objectclass.db [28/Oct/2020:20:31:10.156815797 -0400] - INFO - upgradedb_core - userRoot: Start upgradedb. 
[28/Oct/2020:20:31:10.160545556 -0400] - INFO - bdb_instance_start - Import is running with nsslapd-db-private-import-mem on; No other process is allowed to access the database [28/Oct/2020:20:31:10.163978138 -0400] - INFO - check_and_set_import_cache - pagesize: 4096, available bytes 7578103808, process usage 23560192 [28/Oct/2020:20:31:10.167212846 -0400] - INFO - check_and_set_import_cache - Import allocates 2960196KB import cache. [28/Oct/2020:20:31:10.397724081 -0400] - INFO - bdb_import_main - reindex userRoot: Index buffering enabled with bucket size 100 [28/Oct/2020:20:31:11.103888388 -0400] - INFO - import_monitor_threads - reindex userRoot: Workers finished; cleaning up... [28/Oct/2020:20:31:11.307751435 -0400] - INFO - import_monitor_threads - reindex userRoot: Workers cleaned up. [28/Oct/2020:20:31:11.311526641 -0400] - INFO - bdb_import_main - reindex userRoot: Cleaning up producer thread... [28/Oct/2020:20:31:11.314798261 -0400] - INFO - bdb_import_main - reindex userRoot: Indexing complete. Post-processing... [28/Oct/2020:20:31:11.317999273 -0400] - INFO - bdb_import_main - reindex userRoot: Generating numsubordinates (this may take several minutes to complete)... [28/Oct/2020:20:31:11.321267727 -0400] - INFO - bdb_import_main - reindex userRoot: Generating numSubordinates complete. [28/Oct/2020:20:31:11.324948181 -0400] - INFO - bdb_get_nonleaf_ids - reindex userRoot: Gathering ancestorid non-leaf IDs... [28/Oct/2020:20:31:11.328376992 -0400] - INFO - bdb_get_nonleaf_ids - reindex userRoot: Finished gathering ancestorid non-leaf IDs. [28/Oct/2020:20:31:11.332010415 -0400] - INFO - ldbm_get_nonleaf_ids - reindex userRoot: Starting sort of ancestorid non-leaf IDs... [28/Oct/2020:20:31:11.335370564 -0400] - INFO - ldbm_get_nonleaf_ids - reindex userRoot: Finished sort of ancestorid non-leaf IDs. [28/Oct/2020:20:31:11.344887088 -0400] - INFO - bdb_ancestorid_new_idl_create_index - reindex userRoot: Creating ancestorid index (new idl)... 
[28/Oct/2020:20:31:11.349029281 -0400] - INFO - bdb_ancestorid_new_idl_create_index - reindex userRoot: Created ancestorid index (new idl). [28/Oct/2020:20:31:11.352573374 -0400] - INFO - bdb_import_main - reindex userRoot: Flushing caches... [28/Oct/2020:20:31:11.356047939 -0400] - INFO - bdb_import_main - reindex userRoot: Closing files... [28/Oct/2020:20:31:11.453970812 -0400] - INFO - bdb_import_main - reindex userRoot: Reindexing complete. Processed 160 entries in 1 seconds. (160.00 entries/sec) [28/Oct/2020:20:31:11.459314233 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/log.0000000001 to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T20:31:09.886404/log.0000000001 [28/Oct/2020:20:31:11.472850128 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/DBVERSION to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T20:31:09.886404/DBVERSION [28/Oct/2020:20:31:11.476970721 -0400] - INFO - bdb_pre_close - All database threads now stopped [28/Oct/2020:20:31:11.644366645 -0400] - INFO - slapd_exemode_db2index - Backend Instance: userRoot [28/Oct/2020:20:31:11.651906735 -0400] - INFO - ldbm_instance_config_cachememsize_set - force a minimal value 512000 [28/Oct/2020:20:31:11.659100742 -0400] - INFO - bdb_instance_start - Import is running with nsslapd-db-private-import-mem on; No other process is allowed to access the database [28/Oct/2020:20:31:11.668970517 -0400] - INFO - check_and_set_import_cache - pagesize: 4096, available bytes 7578107904, process usage 22884352 [28/Oct/2020:20:31:11.672595262 -0400] - INFO - check_and_set_import_cache - Import allocates 2960198KB import cache. 
[28/Oct/2020:20:31:11.812329062 -0400] - INFO - bdb_db2index - userRoot: Indexing attribute: uid [28/Oct/2020:20:31:11.816220750 -0400] - ERR - libdb - BDB1566 txn_checkpoint interface requires an environment configured for the transaction subsystem [28/Oct/2020:20:31:11.819576889 -0400] - ERR - bdb_force_checkpoint - Checkpoint FAILED, error Invalid argument (22) [28/Oct/2020:20:31:11.833791512 -0400] - INFO - bdb_db2index - userRoot: Finished indexing. [28/Oct/2020:20:31:11.856412487 -0400] - INFO - bdb_pre_close - All database threads now stopped
Passed suites/basic/basic_test.py::test_basic_acl 0.90
No log output captured.
Passed suites/basic/basic_test.py::test_basic_searches 0.08
No log output captured.
Passed suites/basic/basic_test.py::test_search_req_attrs[attrs0-cn-False] 0.00
No log output captured.
Passed suites/basic/basic_test.py::test_search_req_attrs[attrs1-cn-True] 0.00
No log output captured.
Passed suites/basic/basic_test.py::test_search_req_attrs[attrs2-nsUniqueId-True] 0.00
No log output captured.
Passed suites/basic/basic_test.py::test_search_req_attrs[attrs3-cn-True] 0.00
No log output captured.
Passed suites/basic/basic_test.py::test_search_req_attrs[attrs4-cn-True] 0.00
No log output captured.
Passed suites/basic/basic_test.py::test_basic_referrals 3.51
No log output captured.
Passed suites/basic/basic_test.py::test_basic_systemctl 11.66
No log output captured.
Passed suites/basic/basic_test.py::test_basic_ldapagent 5.02
No log output captured.
Passed suites/basic/basic_test.py::test_basic_dse_survives_kill9 11.34
No log output captured.
Passed suites/basic/basic_test.py::test_def_rootdse_attr[namingContexts] 0.02
No log output captured.
Passed suites/basic/basic_test.py::test_def_rootdse_attr[supportedLDAPVersion] 0.02
No log output captured.
Passed suites/basic/basic_test.py::test_def_rootdse_attr[supportedControl] 0.02
No log output captured.
Passed suites/basic/basic_test.py::test_def_rootdse_attr[supportedExtension] 0.02
No log output captured.
Passed suites/basic/basic_test.py::test_def_rootdse_attr[supportedSASLMechanisms] 0.02
No log output captured.
Passed suites/basic/basic_test.py::test_def_rootdse_attr[vendorName] 0.02
No log output captured.
Passed suites/basic/basic_test.py::test_def_rootdse_attr[vendorVersion] 0.02
No log output captured.
Passed suites/basic/basic_test.py::test_mod_def_rootdse_attr[namingContexts] 0.00
No log output captured.
Passed suites/basic/basic_test.py::test_mod_def_rootdse_attr[supportedLDAPVersion] 0.00
No log output captured.
Passed suites/basic/basic_test.py::test_mod_def_rootdse_attr[supportedControl] 0.00
No log output captured.
Passed suites/basic/basic_test.py::test_mod_def_rootdse_attr[supportedExtension] 0.00
No log output captured.
Passed suites/basic/basic_test.py::test_mod_def_rootdse_attr[supportedSASLMechanisms] 0.00
No log output captured.
Passed suites/basic/basic_test.py::test_mod_def_rootdse_attr[vendorName] 0.25
No log output captured.
Passed suites/basic/basic_test.py::test_mod_def_rootdse_attr[vendorVersion] 0.00
No log output captured.
Passed suites/basic/basic_test.py::test_basic_anonymous_search 0.03
No log output captured.
Passed suites/basic/basic_test.py::test_search_original_type 0.02
No log output captured.
Passed suites/basic/basic_test.py::test_search_ou 0.01
No log output captured.
Passed suites/basic/basic_test.py::test_connection_buffer_size 0.02
No log output captured.
Passed suites/basic/basic_test.py::test_critical_msg_on_empty_range_idl 6.25
No log output captured.
Passed suites/basic/basic_test.py::test_ldbm_modification_audit_log 11.58
No log output captured.
Passed suites/basic/basic_test.py::test_dscreate 13.00
------------------------------Captured stdout call------------------------------
Starting installation... Completed installation for test_dscreate
Passed suites/basic/basic_test.py::test_dscreate_ldapi 0.00
-----------------------------Captured stdout setup------------------------------
Starting installation... Completed installation for test-longname-deadbeef-deadbeef-deadbeef-deadbeef-deadbeef
-------------------------------Captured log call--------------------------------
DEBUG  RootDSE:_mapped_object.py:635 get_attr_vals('supportedControl')
Passed suites/basic/basic_test.py::test_dscreate_multiple_dashes_name 16.30
-----------------------------Captured stdout setup------------------------------
Starting installation... Completed installation for test-longname-deadbeef-deadbeef-deadbeef-deadbeef-deadbeef
Passed suites/basic/basic_test.py::test_dscreate_with_different_rdn[c=uk] 14.20
------------------------------Captured stdout call------------------------------
Starting installation... Completed installation for test_different_rdn
Passed suites/basic/basic_test.py::test_dscreate_with_different_rdn[cn=test_user] 14.51
-----------------------------Captured stdout setup------------------------------
Removing instance ... Completed instance removal
------------------------------Captured stdout call------------------------------
Starting installation... Completed installation for test_different_rdn
Passed suites/basic/basic_test.py::test_dscreate_with_different_rdn[dc=example,dc=com] 15.59
-----------------------------Captured stdout setup------------------------------
Removing instance ... Completed instance removal
------------------------------Captured stdout call------------------------------
Starting installation... Completed installation for test_different_rdn
Passed suites/basic/basic_test.py::test_dscreate_with_different_rdn[o=south] 15.84
-----------------------------Captured stdout setup------------------------------
Removing instance ... Completed instance removal
------------------------------Captured stdout call------------------------------
Starting installation... Completed installation for test_different_rdn
Passed suites/basic/basic_test.py::test_dscreate_with_different_rdn[ou=sales] 14.45
-----------------------------Captured stdout setup------------------------------
Removing instance ... Completed instance removal
------------------------------Captured stdout call------------------------------
Starting installation... Completed installation for test_different_rdn
Passed suites/basic/basic_test.py::test_dscreate_with_different_rdn[wrong=some_value] 12.78
-----------------------------Captured stdout setup------------------------------
Removing instance ... Completed instance removal
------------------------------Captured stdout call------------------------------
Starting installation... Error: Instance creation failed! Suffix RDN 'wrong' in 'wrong=some_value' is not supported. Supported RDN's are: 'c', 'cn', 'dc', 'o', and 'ou'
-------------------------------Captured log call--------------------------------
CRITICAL tests.suites.basic.basic_test:basic_test.py:1474 dscreate failed! Error (1) None
Passed suites/betxns/betxn_test.py::test_betxt_7bit 4.57
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.suites.betxns.betxn_test:betxn_test.py:52 Running test_betxt_7bit... INFO  tests.suites.betxns.betxn_test:betxn_test.py:78 test_betxt_7bit: PASSED
Passed suites/betxns/betxn_test.py::test_betxn_attr_uniqueness 4.80
-------------------------------Captured log call--------------------------------
INFO  tests.suites.betxns.betxn_test:betxn_test.py:133 test_betxn_attr_uniqueness: PASSED
Passed suites/betxns/betxn_test.py::test_betxn_memberof 4.46
-------------------------------Captured log call--------------------------------
INFO  tests.suites.betxns.betxn_test:betxn_test.py:179 test_betxn_memberof: PASSED
Passed suites/betxns/betxn_test.py::test_betxn_modrdn_memberof_cache_corruption 5.20
-------------------------------Captured log call--------------------------------
INFO  tests.suites.betxns.betxn_test:betxn_test.py:233 test_betxn_modrdn_memberof: PASSED
Passed suites/betxns/betxn_test.py::test_ri_and_mep_cache_corruption 0.48
-------------------------------Captured log call--------------------------------
INFO  tests.suites.betxns.betxn_test:betxn_test.py:357 Test PASSED
Passed suites/clu/clu_test.py::test_clu_pwdhash 0.04
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.suites.clu.clu_test:clu_test.py:40 Running test_clu_pwdhash... INFO  tests.suites.clu.clu_test:clu_test.py:54 pwdhash generated: {SSHA}Q2aJUlHn+6LWZr61f+FgIkfrscoQdUcB/SElcw== INFO  tests.suites.clu.clu_test:clu_test.py:55 test_clu_pwdhash: PASSED
Passed suites/clu/clu_test.py::test_clu_pwdhash_mod 0.30
-------------------------------Captured log call--------------------------------
INFO  tests.suites.clu.clu_test:clu_test.py:78 Running test_clu_pwdhash_mod... INFO  tests.suites.clu.clu_test:clu_test.py:87 pwdhash generated: {SSHA256}DgxCilzBS+ALegL6oBKQbkeUBWlc7n8IRYl6cYvEpMijndtXm3VXdA== INFO  tests.suites.clu.clu_test:clu_test.py:88 returned the hashed string using the algorithm set in nsslapd-rootpwstoragescheme
Passed suites/clu/dbgen_test.py::test_dsconf_dbgen_users 6.13
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.suites.clu.dbgen_test:dbgen_test.py:119 Run ldifgen to create users ldif INFO  tests.suites.clu.dbgen_test:dbgen.py:56 Generating LDIF with the following options: INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - suffix=dc=example,dc=com INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - parent=ou=people,dc=example,dc=com INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - number=1000 INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - rdn-cn=False INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - generic=True INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - start-idx=50 INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - localize=False INFO  tests.suites.clu.dbgen_test:dbgen.py:62 - ldif-file=/var/lib/dirsrv/slapd-standalone1/ldif/created.ldif INFO  tests.suites.clu.dbgen_test:dbgen.py:63 Writing LDIF ... INFO  tests.suites.clu.dbgen_test:dbgen.py:196 Successfully created LDIF file: /var/lib/dirsrv/slapd-standalone1/ldif/created.ldif INFO  tests.suites.clu.dbgen_test:dbgen_test.py:122 Check if file exists INFO  tests.suites.clu.dbgen_test:dbgen_test.py:67 Check if content is present in output INFO  tests.suites.clu.dbgen_test:dbgen_test.py:71 Reset log file for next test INFO  tests.suites.clu.dbgen_test:dbgen_test.py:127 Get number of accounts before import INFO  tests.suites.clu.dbgen_test:dbgen_test.py:48 Stopping the server and running offline import... INFO  tests.suites.clu.dbgen_test:dbgen_test.py:133 Check that accounts are imported
Passed suites/clu/dbgen_test.py::test_dsconf_dbgen_groups 32.49
------------------------------Captured stderr call------------------------------
ldap_add: Already exists (68) ldap_add: Already exists (68) ldap_add: Already exists (68)
-------------------------------Captured log call--------------------------------
INFO  tests.suites.clu.dbgen_test:dbgen_test.py:183 Run ldifgen to create group ldif INFO  tests.suites.clu.dbgen_test:dbgen.py:56 Generating LDIF with the following options: INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - NAME=myGroup INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - parent=ou=groups,dc=example,dc=com INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - suffix=dc=example,dc=com INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - number=1 INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - num-members=1000 INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - create-members=True INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - member-attr=uniquemember INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - member-parent=ou=people,dc=example,dc=com INFO  tests.suites.clu.dbgen_test:dbgen.py:62 - ldif-file=/var/lib/dirsrv/slapd-standalone1/ldif/created.ldif INFO  tests.suites.clu.dbgen_test:dbgen.py:63 Writing LDIF ... INFO  tests.suites.clu.dbgen_test:dbgen.py:250 Successfully created LDIF file: /var/lib/dirsrv/slapd-standalone1/ldif/created.ldif INFO  tests.suites.clu.dbgen_test:dbgen_test.py:186 Check if file exists INFO  tests.suites.clu.dbgen_test:dbgen_test.py:67 Check if content is present in output INFO  tests.suites.clu.dbgen_test:dbgen_test.py:71 Reset log file for next test INFO  tests.suites.clu.dbgen_test:dbgen_test.py:191 Get number of accounts before import INFO  tests.suites.clu.dbgen_test:dbgen_test.py:57 Add entries from ldif file with ldapmodify INFO  tests.suites.clu.dbgen_test:dbgen_test.py:200 Check that accounts are imported INFO  tests.suites.clu.dbgen_test:dbgen_test.py:203 Check that group is imported
Passed suites/clu/dbgen_test.py::test_dsconf_dbgen_cos_classic 0.15
-------------------------------Captured log call--------------------------------
INFO  tests.suites.clu.dbgen_test:dbgen_test.py:256 Run ldifgen to create COS definition ldif INFO  tests.suites.clu.dbgen_test:dbgen.py:56 Generating LDIF with the following options: INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - type=classic INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - NAME=My_Postal_Def INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - parent=ou=cos definitions,dc=example,dc=com INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - create-parent=True INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - cos-specifier=businessCategory INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - cos-attr=['postalcode', 'telephonenumber'] INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - cos-template=cn=sales,cn=classicCoS,dc=example,dc=com INFO  tests.suites.clu.dbgen_test:dbgen.py:62 - ldif-file=/var/lib/dirsrv/slapd-standalone1/ldif/created.ldif INFO  tests.suites.clu.dbgen_test:dbgen.py:63 Writing LDIF ... INFO  tests.suites.clu.dbgen_test:dbgen.py:304 Successfully created LDIF file: /var/lib/dirsrv/slapd-standalone1/ldif/created.ldif INFO  tests.suites.clu.dbgen_test:dbgen_test.py:259 Check if file exists INFO  tests.suites.clu.dbgen_test:dbgen_test.py:67 Check if content is present in output INFO  tests.suites.clu.dbgen_test:dbgen_test.py:71 Reset log file for next test INFO  tests.suites.clu.dbgen_test:dbgen_test.py:57 Add entries from ldif file with ldapmodify INFO  tests.suites.clu.dbgen_test:dbgen_test.py:267 Check that COS definition is imported
Passed suites/clu/dbgen_test.py::test_dsconf_dbgen_cos_pointer 0.03
-------------------------------Captured log call--------------------------------
INFO  tests.suites.clu.dbgen_test:dbgen_test.py:322 Run ldifgen to create COS definition ldif INFO  tests.suites.clu.dbgen_test:dbgen.py:56 Generating LDIF with the following options: INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - type=pointer INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - NAME=My_Postal_Def_pointer INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - parent=ou=cos pointer definitions,dc=example,dc=com INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - create-parent=True INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - cos-attr=['postalcode', 'telephonenumber'] INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - cos-template=cn=sales,cn=pointerCoS,dc=example,dc=com INFO  tests.suites.clu.dbgen_test:dbgen.py:62 - ldif-file=/var/lib/dirsrv/slapd-standalone1/ldif/created.ldif INFO  tests.suites.clu.dbgen_test:dbgen.py:63 Writing LDIF ... INFO  tests.suites.clu.dbgen_test:dbgen.py:304 Successfully created LDIF file: /var/lib/dirsrv/slapd-standalone1/ldif/created.ldif INFO  tests.suites.clu.dbgen_test:dbgen_test.py:325 Check if file exists INFO  tests.suites.clu.dbgen_test:dbgen_test.py:67 Check if content is present in output INFO  tests.suites.clu.dbgen_test:dbgen_test.py:71 Reset log file for next test INFO  tests.suites.clu.dbgen_test:dbgen_test.py:57 Add entries from ldif file with ldapmodify INFO  tests.suites.clu.dbgen_test:dbgen_test.py:333 Check that COS definition is imported
Passed suites/clu/dbgen_test.py::test_dsconf_dbgen_cos_indirect 0.06
-------------------------------Captured log call--------------------------------
INFO  tests.suites.clu.dbgen_test:dbgen_test.py:387 Run ldifgen to create COS definition ldif INFO  tests.suites.clu.dbgen_test:dbgen.py:56 Generating LDIF with the following options: INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - type=indirect INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - NAME=My_Postal_Def_indirect INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - parent=ou=cos indirect definitions,dc=example,dc=com INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - create-parent=True INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - cos-specifier=businessCategory INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - cos-attr=['postalcode', 'telephonenumber'] INFO  tests.suites.clu.dbgen_test:dbgen.py:62 - ldif-file=/var/lib/dirsrv/slapd-standalone1/ldif/created.ldif INFO  tests.suites.clu.dbgen_test:dbgen.py:63 Writing LDIF ... INFO  tests.suites.clu.dbgen_test:dbgen.py:304 Successfully created LDIF file: /var/lib/dirsrv/slapd-standalone1/ldif/created.ldif INFO  tests.suites.clu.dbgen_test:dbgen_test.py:390 Check if file exists INFO  tests.suites.clu.dbgen_test:dbgen_test.py:67 Check if content is present in output INFO  tests.suites.clu.dbgen_test:dbgen_test.py:71 Reset log file for next test INFO  tests.suites.clu.dbgen_test:dbgen_test.py:57 Add entries from ldif file with ldapmodify INFO  tests.suites.clu.dbgen_test:dbgen_test.py:398 Check that COS definition is imported
Passed suites/clu/dbgen_test.py::test_dsconf_dbgen_cos_template 0.03
-------------------------------Captured log call--------------------------------
INFO  tests.suites.clu.dbgen_test:dbgen_test.py:449 Run ldifgen to create COS template ldif INFO  tests.suites.clu.dbgen_test:dbgen.py:56 Generating LDIF with the following options: INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - NAME=My_Template INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - parent=ou=cos templates,dc=example,dc=com INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - create-parent=True INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - cos-priority=1 INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - cos-attr-val=postalcode:12345 INFO  tests.suites.clu.dbgen_test:dbgen.py:62 - ldif-file=/var/lib/dirsrv/slapd-standalone1/ldif/created.ldif INFO  tests.suites.clu.dbgen_test:dbgen.py:63 Writing LDIF ... INFO  tests.suites.clu.dbgen_test:dbgen.py:341 Successfully created LDIF file: /var/lib/dirsrv/slapd-standalone1/ldif/created.ldif INFO  tests.suites.clu.dbgen_test:dbgen_test.py:452 Check if file exists INFO  tests.suites.clu.dbgen_test:dbgen_test.py:67 Check if content is present in output INFO  tests.suites.clu.dbgen_test:dbgen_test.py:71 Reset log file for next test INFO  tests.suites.clu.dbgen_test:dbgen_test.py:57 Add entries from ldif file with ldapmodify INFO  tests.suites.clu.dbgen_test:dbgen_test.py:460 Check that COS template is imported
Passed suites/clu/dbgen_test.py::test_dsconf_dbgen_managed_role 0.03
-------------------------------Captured log call--------------------------------
INFO  tests.suites.clu.dbgen_test:dbgen_test.py:511 Run ldifgen to create managed role ldif INFO  tests.suites.clu.dbgen_test:dbgen.py:56 Generating LDIF with the following options: INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - NAME=My_Managed_Role INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - parent=ou=managed roles,dc=example,dc=com INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - create-parent=True INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - type=managed INFO  tests.suites.clu.dbgen_test:dbgen.py:62 - ldif-file=/var/lib/dirsrv/slapd-standalone1/ldif/created.ldif INFO  tests.suites.clu.dbgen_test:dbgen.py:63 Writing LDIF ... INFO  tests.suites.clu.dbgen_test:dbgen.py:391 Successfully created LDIF file: /var/lib/dirsrv/slapd-standalone1/ldif/created.ldif INFO  tests.suites.clu.dbgen_test:dbgen_test.py:514 Check if file exists INFO  tests.suites.clu.dbgen_test:dbgen_test.py:67 Check if content is present in output INFO  tests.suites.clu.dbgen_test:dbgen_test.py:71 Reset log file for next test INFO  tests.suites.clu.dbgen_test:dbgen_test.py:57 Add entries from ldif file with ldapmodify INFO  tests.suites.clu.dbgen_test:dbgen_test.py:522 Check that managed role is imported
Passed suites/clu/dbgen_test.py::test_dsconf_dbgen_filtered_role 0.02
-------------------------------Captured log call--------------------------------
INFO  tests.suites.clu.dbgen_test:dbgen_test.py:571 Run ldifgen to create filtered role ldif INFO  tests.suites.clu.dbgen_test:dbgen.py:56 Generating LDIF with the following options: INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - NAME=My_Filtered_Role INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - parent=ou=filtered roles,dc=example,dc=com INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - create-parent=True INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - type=filtered INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - filter="objectclass=posixAccount" INFO  tests.suites.clu.dbgen_test:dbgen.py:62 - ldif-file=/var/lib/dirsrv/slapd-standalone1/ldif/created.ldif INFO  tests.suites.clu.dbgen_test:dbgen.py:63 Writing LDIF ... INFO  tests.suites.clu.dbgen_test:dbgen.py:391 Successfully created LDIF file: /var/lib/dirsrv/slapd-standalone1/ldif/created.ldif INFO  tests.suites.clu.dbgen_test:dbgen_test.py:574 Check if file exists INFO  tests.suites.clu.dbgen_test:dbgen_test.py:67 Check if content is present in output INFO  tests.suites.clu.dbgen_test:dbgen_test.py:71 Reset log file for next test INFO  tests.suites.clu.dbgen_test:dbgen_test.py:57 Add entries from ldif file with ldapmodify INFO  tests.suites.clu.dbgen_test:dbgen_test.py:582 Check that filtered role is imported
Passed suites/clu/dbgen_test.py::test_dsconf_dbgen_nested_role 0.03
-------------------------------Captured log call--------------------------------
INFO  tests.suites.clu.dbgen_test:dbgen_test.py:632 Run ldifgen to create nested role ldif INFO  tests.suites.clu.dbgen_test:dbgen.py:56 Generating LDIF with the following options: INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - NAME=My_Nested_Role INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - parent=ou=nested roles,dc=example,dc=com INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - create-parent=True INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - type=nested INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - role-dn=['cn=some_role,ou=roles,dc=example,dc=com'] INFO  tests.suites.clu.dbgen_test:dbgen.py:62 - ldif-file=/var/lib/dirsrv/slapd-standalone1/ldif/created.ldif INFO  tests.suites.clu.dbgen_test:dbgen.py:63 Writing LDIF ... INFO  tests.suites.clu.dbgen_test:dbgen.py:391 Successfully created LDIF file: /var/lib/dirsrv/slapd-standalone1/ldif/created.ldif INFO  tests.suites.clu.dbgen_test:dbgen_test.py:635 Check if file exists INFO  tests.suites.clu.dbgen_test:dbgen_test.py:67 Check if content is present in output INFO  tests.suites.clu.dbgen_test:dbgen_test.py:71 Reset log file for next test INFO  tests.suites.clu.dbgen_test:dbgen_test.py:57 Add entries from ldif file with ldapmodify INFO  tests.suites.clu.dbgen_test:dbgen_test.py:643 Check that nested role is imported
Passed suites/clu/dbgen_test.py::test_dsconf_dbgen_mod_ldif_mixed 35.24
------------------------------Captured stderr call------------------------------
ldap_modify: Operation not allowed on RDN (67) ldap_modify: Operation not allowed on RDN (67) ldap_modify: Operation not allowed on RDN (67) ldap_modify: Operation not allowed on RDN (67) ldap_modify: Operation not allowed on RDN (67) ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN 
ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional 
info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax 
(34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldap_rename: Invalid DN syntax (34) additional info: invalid RDN ldapmodify: extra lines at end (line 43453, entry "uid=user0999,dc=example,dc=com")
-------------------------------Captured log call--------------------------------
INFO  tests.suites.clu.dbgen_test:dbgen_test.py:702 Run ldifgen to create modification ldif INFO  tests.suites.clu.dbgen_test:dbgen.py:56 Generating LDIF with the following options: INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - parent=dc=example,dc=com INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - create-users=True INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - delete-users=True INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - create-parent=False INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - num-users=1000 INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - add-users=100 INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - del-users=999 INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - modrdn-users=100 INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - mod-users=10 INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - mod-attrs=['cn', 'uid', 'sn'] INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - randomize=False INFO  tests.suites.clu.dbgen_test:dbgen.py:62 - ldif-file=/var/lib/dirsrv/slapd-standalone1/ldif/created.ldif INFO  tests.suites.clu.dbgen_test:dbgen.py:63 Writing LDIF ... INFO  tests.suites.clu.dbgen_test:dbgen.py:467 Successfully created LDIF file: /var/lib/dirsrv/slapd-standalone1/ldif/created.ldif INFO  tests.suites.clu.dbgen_test:dbgen_test.py:705 Check if file exists INFO  tests.suites.clu.dbgen_test:dbgen_test.py:67 Check if content is present in output INFO  tests.suites.clu.dbgen_test:dbgen_test.py:71 Reset log file for next test INFO  tests.suites.clu.dbgen_test:dbgen_test.py:710 Get number of accounts before import INFO  tests.suites.clu.dbgen_test:dbgen_test.py:57 Add entries from ldif file with ldapmodify INFO  tests.suites.clu.dbgen_test:dbgen_test.py:719 Check that some accounts are imported
Passed suites/clu/dbgen_test.py::test_dsconf_dbgen_nested_ldif 24.79
------------------------------Captured stderr call------------------------------
ldap_add: Already exists (68)
-------------------------------Captured log call--------------------------------
INFO  tests.suites.clu.dbgen_test:dbgen_test.py:759 Run ldifgen to create nested ldif INFO  tests.suites.clu.dbgen_test:dbgen.py:56 Generating LDIF with the following options: INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - suffix=dc=example,dc=com INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - node-limit=100 INFO  tests.suites.clu.dbgen_test:dbgen.py:61 - num-users=600 INFO  tests.suites.clu.dbgen_test:dbgen.py:62 - ldif-file=/var/lib/dirsrv/slapd-standalone1/ldif/created.ldif INFO  tests.suites.clu.dbgen_test:dbgen.py:63 Writing LDIF ... INFO  tests.suites.clu.dbgen_test:dbgen.py:500 Successfully created nested LDIF file (/var/lib/dirsrv/slapd-standalone1/ldif/created.ldif) containing 6 nodes/subtrees INFO  tests.suites.clu.dbgen_test:dbgen_test.py:762 Check if file exists INFO  tests.suites.clu.dbgen_test:dbgen_test.py:67 Check if content is present in output INFO  tests.suites.clu.dbgen_test:dbgen_test.py:71 Reset log file for next test INFO  tests.suites.clu.dbgen_test:dbgen_test.py:767 Get number of accounts before import INFO  tests.suites.clu.dbgen_test:dbgen_test.py:57 Add entries from ldif file with ldapmodify INFO  tests.suites.clu.dbgen_test:dbgen_test.py:779 Check that accounts are imported
Passed suites/clu/dbmon_test.py::test_dsconf_dbmon 0.24
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.suites.clu.dbmon_test:dbmon_test.py:164 Sanity check for syntax INFO  LogCapture:monitor.py:247 DB Monitor Report: 2020-10-28 20:37:36 INFO  LogCapture:monitor.py:248 -------------------------------------------------------- INFO  LogCapture:monitor.py:249 Database Cache: INFO  LogCapture:monitor.py:250 - Cache Hit Ratio: 100% INFO  LogCapture:monitor.py:251 - Free Space: 486.95 MB INFO  LogCapture:monitor.py:252 - Free Percentage: 100.0% INFO  LogCapture:monitor.py:253 - RO Page Drops: 0 INFO  LogCapture:monitor.py:254 - Pages In: 0 INFO  LogCapture:monitor.py:255 - Pages Out: 0 INFO  LogCapture:monitor.py:256 INFO  LogCapture:monitor.py:257 Normalized DN Cache: INFO  LogCapture:monitor.py:258 - Cache Hit Ratio: 72% INFO  LogCapture:monitor.py:259 - Free Space: 19.99 MB INFO  LogCapture:monitor.py:260 - Free Percentage: 99.9% INFO  LogCapture:monitor.py:261 - DN Count: 71 INFO  LogCapture:monitor.py:262 - Evictions: 0 INFO  LogCapture:monitor.py:263 INFO  LogCapture:monitor.py:264 Backends: INFO  LogCapture:monitor.py:266 - dc=example,dc=com (userRoot): INFO  LogCapture:monitor.py:267 - Entry Cache Hit Ratio: 40% INFO  LogCapture:monitor.py:268 - Entry Cache Count: 5 INFO  LogCapture:monitor.py:269 - Entry Cache Free Space: 1.31 GB INFO  LogCapture:monitor.py:270 - Entry Cache Free Percentage: 100.0% INFO  LogCapture:monitor.py:271 - Entry Cache Average Size: 3.65 KB INFO  LogCapture:monitor.py:272 - DN Cache Hit Ratio: 0% INFO  LogCapture:monitor.py:273 - DN Cache Count: 5 INFO  LogCapture:monitor.py:274 - DN Cache Free Space: 192.0 MB INFO  LogCapture:monitor.py:275 - DN Cache Free Percentage: 100.0% INFO  LogCapture:monitor.py:276 - DN Cache Average Size: 67.0 B INFO  LogCapture:monitor.py:286 INFO  tests.suites.clu.dbmon_test:dbmon_test.py:133 Clear the log INFO  tests.suites.clu.dbmon_test:dbmon_test.py:171 Sanity check for --indexes output INFO  LogCapture:monitor.py:247 DB Monitor Report: 2020-10-28 20:37:36 INFO  LogCapture:monitor.py:248 
-------------------------------------------------------- INFO  LogCapture:monitor.py:249 Database Cache: INFO  LogCapture:monitor.py:250 - Cache Hit Ratio: 100% INFO  LogCapture:monitor.py:251 - Free Space: 486.95 MB INFO  LogCapture:monitor.py:252 - Free Percentage: 100.0% INFO  LogCapture:monitor.py:253 - RO Page Drops: 0 INFO  LogCapture:monitor.py:254 - Pages In: 0 INFO  LogCapture:monitor.py:255 - Pages Out: 0 INFO  LogCapture:monitor.py:256 INFO  LogCapture:monitor.py:257 Normalized DN Cache: INFO  LogCapture:monitor.py:258 - Cache Hit Ratio: 72% INFO  LogCapture:monitor.py:259 - Free Space: 19.99 MB INFO  LogCapture:monitor.py:260 - Free Percentage: 99.9% INFO  LogCapture:monitor.py:261 - DN Count: 71 INFO  LogCapture:monitor.py:262 - Evictions: 0 INFO  LogCapture:monitor.py:263 INFO  LogCapture:monitor.py:264 Backends: INFO  LogCapture:monitor.py:266 - dc=example,dc=com (userRoot): INFO  LogCapture:monitor.py:267 - Entry Cache Hit Ratio: 40% INFO  LogCapture:monitor.py:268 - Entry Cache Count: 5 INFO  LogCapture:monitor.py:269 - Entry Cache Free Space: 1.31 GB INFO  LogCapture:monitor.py:270 - Entry Cache Free Percentage: 100.0% INFO  LogCapture:monitor.py:271 - Entry Cache Average Size: 3.65 KB INFO  LogCapture:monitor.py:272 - DN Cache Hit Ratio: 0% INFO  LogCapture:monitor.py:273 - DN Cache Count: 5 INFO  LogCapture:monitor.py:274 - DN Cache Free Space: 192.0 MB INFO  LogCapture:monitor.py:275 - DN Cache Free Percentage: 100.0% INFO  LogCapture:monitor.py:276 - DN Cache Average Size: 67.0 B INFO  LogCapture:monitor.py:278 - Indexes: INFO  LogCapture:monitor.py:280 - Index: id2entry.db INFO  LogCapture:monitor.py:281 - Cache Hit: 8 INFO  LogCapture:monitor.py:282 - Cache Miss: 0 INFO  LogCapture:monitor.py:283 - Page In: 0 INFO  LogCapture:monitor.py:284 - Page Out: 0 INFO  LogCapture:monitor.py:285 INFO  LogCapture:monitor.py:280 - Index: numsubordinates.db INFO  LogCapture:monitor.py:281 - Cache Hit: 0 INFO  LogCapture:monitor.py:282 - Cache Miss: 0 
INFO  LogCapture:monitor.py:283 - Page In: 0 INFO  LogCapture:monitor.py:284 - Page Out: 0 INFO  LogCapture:monitor.py:285 INFO  LogCapture:monitor.py:280 - Index: uid.db INFO  LogCapture:monitor.py:281 - Cache Hit: 0 INFO  LogCapture:monitor.py:282 - Cache Miss: 0 INFO  LogCapture:monitor.py:283 - Page In: 0 INFO  LogCapture:monitor.py:284 - Page Out: 0 INFO  LogCapture:monitor.py:285 INFO  LogCapture:monitor.py:280 - Index: nsuniqueid.db INFO  LogCapture:monitor.py:281 - Cache Hit: 0 INFO  LogCapture:monitor.py:282 - Cache Miss: 0 INFO  LogCapture:monitor.py:283 - Page In: 0 INFO  LogCapture:monitor.py:284 - Page Out: 0 INFO  LogCapture:monitor.py:285 INFO  LogCapture:monitor.py:280 - Index: entryrdn.db INFO  LogCapture:monitor.py:281 - Cache Hit: 16 INFO  LogCapture:monitor.py:282 - Cache Miss: 0 INFO  LogCapture:monitor.py:283 - Page In: 0 INFO  LogCapture:monitor.py:284 - Page Out: 0 INFO  LogCapture:monitor.py:285 INFO  LogCapture:monitor.py:280 - Index: aci.db INFO  LogCapture:monitor.py:281 - Cache Hit: 3 INFO  LogCapture:monitor.py:282 - Cache Miss: 0 INFO  LogCapture:monitor.py:283 - Page In: 0 INFO  LogCapture:monitor.py:284 - Page Out: 0 INFO  LogCapture:monitor.py:285 INFO  LogCapture:monitor.py:280 - Index: cn.db INFO  LogCapture:monitor.py:281 - Cache Hit: 0 INFO  LogCapture:monitor.py:282 - Cache Miss: 0 INFO  LogCapture:monitor.py:283 - Page In: 0 INFO  LogCapture:monitor.py:284 - Page Out: 0 INFO  LogCapture:monitor.py:285 INFO  LogCapture:monitor.py:280 - Index: ancestorid.db INFO  LogCapture:monitor.py:281 - Cache Hit: 0 INFO  LogCapture:monitor.py:282 - Cache Miss: 0 INFO  LogCapture:monitor.py:283 - Page In: 0 INFO  LogCapture:monitor.py:284 - Page Out: 0 INFO  LogCapture:monitor.py:285 INFO  LogCapture:monitor.py:280 - Index: parentid.db INFO  LogCapture:monitor.py:281 - Cache Hit: 0 INFO  LogCapture:monitor.py:282 - Cache Miss: 0 INFO  LogCapture:monitor.py:283 - Page In: 0 INFO  LogCapture:monitor.py:284 - Page Out: 0 INFO  
LogCapture:monitor.py:285 INFO  LogCapture:monitor.py:280 - Index: objectclass.db INFO  LogCapture:monitor.py:281 - Cache Hit: 14 INFO  LogCapture:monitor.py:282 - Cache Miss: 0 INFO  LogCapture:monitor.py:283 - Page In: 0 INFO  LogCapture:monitor.py:284 - Page Out: 0 INFO  LogCapture:monitor.py:285 INFO  LogCapture:monitor.py:286 INFO  tests.suites.clu.dbmon_test:dbmon_test.py:133 Clear the log INFO  tests.suites.clu.dbmon_test:dbmon_test.py:179 Sanity check for --json output INFO  LogCapture:monitor.py:245 { "date": "2020-10-28 20:37:36", "dbcache": { "hit_ratio": "100", "free": "486.95 MB", "free_percentage": "100.0", "roevicts": "0", "pagein": "0", "pageout": "0" }, "ndncache": { "hit_ratio": "72", "free": "19.99 MB", "free_percentage": "99.9", "count": "71", "evictions": "0" }, "backends": { "userRoot": { "suffix": "dc=example,dc=com", "entry_cache_count": "5", "entry_cache_free": "1.31 GB", "entry_cache_free_percentage": "100.0", "entry_cache_size": "3.65 KB", "entry_cache_hit_ratio": "40", "dn_cache_count": "5", "dn_cache_free": "192.0 MB", "dn_cache_free_percentage": "100.0", "dn_cache_size": "67.0 B", "dn_cache_hit_ratio": "0", "indexes": [ { "name": "id2entry.db", "cachehit": "8", "cachemiss": "0", "pagein": "0", "pageout": "0" }, { "name": "numsubordinates.db", "cachehit": "0", "cachemiss": "0", "pagein": "0", "pageout": "0" }, { "name": "uid.db", "cachehit": "0", "cachemiss": "0", "pagein": "0", "pageout": "0" }, { "name": "nsuniqueid.db", "cachehit": "0", "cachemiss": "0", "pagein": "0", "pageout": "0" }, { "name": "entryrdn.db", "cachehit": "16", "cachemiss": "0", "pagein": "0", "pageout": "0" }, { "name": "aci.db", "cachehit": "3", "cachemiss": "0", "pagein": "0", "pageout": "0" }, { "name": "cn.db", "cachehit": "0", "cachemiss": "0", "pagein": "0", "pageout": "0" }, { "name": "ancestorid.db", "cachehit": "0", "cachemiss": "0", "pagein": "0", "pageout": "0" }, { "name": "parentid.db", "cachehit": "0", "cachemiss": "0", "pagein": "0", "pageout": "0" 
}, { "name": "objectclass.db", "cachehit": "14", "cachemiss": "0", "pagein": "0", "pageout": "0" } ] } } } INFO  tests.suites.clu.dbmon_test:dbmon_test.py:133 Clear the log
Passed suites/clu/dbverify_test.py::test_dsctl_dbverify 2.36
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
------------------------------Captured stderr call------------------------------
[28/Oct/2020:20:37:50.048851540 -0400] - INFO - ldbm_instance_config_cachememsize_set - force a minimal value 512000
-------------------------------Captured log call--------------------------------
INFO  tests.suites.clu.dbverify_test:dbverify_test.py:63 Run dbverify INFO  tests.suites.clu.dbverify_test:dbtasks.py:88 dbverify successful INFO  tests.suites.clu.dbverify_test:dbverify_test.py:67 Check dbverify was successful
Passed suites/clu/dsidm_config_test.py::test_dsidm_config_sssd 5.06
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
------------------------------Captured stdout call------------------------------
# # sssd.conf # Generated by 389 Directory Server - dsidm # # For more details see man sssd.conf and man sssd-ldap # Be sure to review the content of this file to ensure it is secure and correct # in your environment. [domain/ldap] # Uncomment this for more verbose logging. # debug_level=3 # Cache hashes of user authentication for offline auth. cache_credentials = True id_provider = ldap auth_provider = ldap access_provider = ldap chpass_provider = ldap ldap_schema = rfc2307 ldap_search_base = dc=example,dc=com ldap_uri = ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:38901 # If you have DNS SRV records, you can use the following instead. This derives # from your ldap_search_base. # ldap_uri = _srv_ ldap_tls_reqcert = demand # To use cacert dir, place *.crt files in this path then run: # /usr/bin/openssl rehash /etc/openldap/certs # or (for older versions of openssl) # /usr/bin/c_rehash /etc/openldap/certs ldap_tls_cacertdir = /etc/openldap/certs # Path to the cacert # ldap_tls_cacert = /etc/openldap/certs/ca.crt # Only users who match this filter can login and authorise to this machine. Note # that users who do NOT match, will still have their uid/gid resolve, but they # can't login. # ldap_access_filter = (memberOf=<dn>) enumerate = false access_provider = ldap ldap_user_member_of = memberof ldap_user_gecos = cn ldap_user_uuid = nsUniqueId ldap_group_uuid = nsUniqueId # This is really important as it allows SSSD to respect nsAccountLock ldap_account_expire_policy = rhds ldap_access_order = filter, expire # Setup for ssh keys # Inside /etc/ssh/sshd_config add the lines: # AuthorizedKeysCommand /usr/bin/sss_ssh_authorizedkeys # AuthorizedKeysCommandUser nobody # You can test with the command: sss_ssh_authorizedkeys <username> ldap_user_ssh_public_key = nsSshPublicKey # This prevents an issue where the Directory is recursively walked on group # and user look ups. It makes the client faster and more responsive in almost # every scenario. 
ignore_group_members = False [sssd] services = nss, pam, ssh, sudo config_file_version = 2 domains = ldap [nss] homedir_substring = /home # # sssd.conf # Generated by 389 Directory Server - dsidm # # For more details see man sssd.conf and man sssd-ldap # Be sure to review the content of this file to ensure it is secure and correct # in your environment. [domain/ldap] # Uncomment this for more verbose logging. # debug_level=3 # Cache hashes of user authentication for offline auth. cache_credentials = True id_provider = ldap auth_provider = ldap access_provider = ldap chpass_provider = ldap ldap_schema = rfc2307bis ldap_search_base = dc=example,dc=com ldap_uri = ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:38901 # If you have DNS SRV records, you can use the following instead. This derives # from your ldap_search_base. # ldap_uri = _srv_ ldap_tls_reqcert = demand # To use cacert dir, place *.crt files in this path then run: # /usr/bin/openssl rehash /etc/openldap/certs # or (for older versions of openssl) # /usr/bin/c_rehash /etc/openldap/certs ldap_tls_cacertdir = /etc/openldap/certs # Path to the cacert # ldap_tls_cacert = /etc/openldap/certs/ca.crt # Only users who match this filter can login and authorise to this machine. Note # that users who do NOT match, will still have their uid/gid resolve, but they # can't login. 
ldap_access_filter = (memberOf=cn=new_group,ou=groups,dc=example,dc=com) enumerate = false access_provider = ldap ldap_user_member_of = memberof ldap_user_gecos = cn ldap_user_uuid = nsUniqueId ldap_group_uuid = nsUniqueId # This is really important as it allows SSSD to respect nsAccountLock ldap_account_expire_policy = rhds ldap_access_order = filter, expire # Setup for ssh keys # Inside /etc/ssh/sshd_config add the lines: # AuthorizedKeysCommand /usr/bin/sss_ssh_authorizedkeys # AuthorizedKeysCommandUser nobody # You can test with the command: sss_ssh_authorizedkeys <username> ldap_user_ssh_public_key = nsSshPublicKey # This prevents an issue where the Directory is recursively walked on group # and user look ups. It makes the client faster and more responsive in almost # every scenario. ignore_group_members = False [sssd] services = nss, pam, ssh, sudo config_file_version = 2 domains = ldap [nss] homedir_substring = /home
-------------------------------Captured log call--------------------------------
INFO  tests.suites.clu.dsidm_config_test:dsidm_config_test.py:101 Create sssd.conf content DEBUG  tests.suites.clu.dsidm_config_test:client_config.py:114 # # sssd.conf # Generated by 389 Directory Server - dsidm # # For more details see man sssd.conf and man sssd-ldap # Be sure to review the content of this file to ensure it is secure and correct # in your environment. [domain/ldap] # Uncomment this for more verbose logging. # debug_level=3 # Cache hashes of user authentication for offline auth. cache_credentials = True id_provider = ldap auth_provider = ldap access_provider = ldap chpass_provider = ldap ldap_schema = rfc2307 ldap_search_base = dc=example,dc=com ldap_uri = ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:38901 # If you have DNS SRV records, you can use the following instead. This derives # from your ldap_search_base. # ldap_uri = _srv_ ldap_tls_reqcert = demand # To use cacert dir, place *.crt files in this path then run: # /usr/bin/openssl rehash /etc/openldap/certs # or (for older versions of openssl) # /usr/bin/c_rehash /etc/openldap/certs ldap_tls_cacertdir = /etc/openldap/certs # Path to the cacert # ldap_tls_cacert = /etc/openldap/certs/ca.crt # Only users who match this filter can login and authorise to this machine. Note # that users who do NOT match, will still have their uid/gid resolve, but they # can't login. 
# ldap_access_filter = (memberOf=<dn>) enumerate = false access_provider = ldap ldap_user_member_of = memberof ldap_user_gecos = cn ldap_user_uuid = nsUniqueId ldap_group_uuid = nsUniqueId # This is really important as it allows SSSD to respect nsAccountLock ldap_account_expire_policy = rhds ldap_access_order = filter, expire # Setup for ssh keys # Inside /etc/ssh/sshd_config add the lines: # AuthorizedKeysCommand /usr/bin/sss_ssh_authorizedkeys # AuthorizedKeysCommandUser nobody # You can test with the command: sss_ssh_authorizedkeys <username> ldap_user_ssh_public_key = nsSshPublicKey # This prevents an issue where the Directory is recursively walked on group # and user look ups. It makes the client faster and more responsive in almost # every scenario. ignore_group_members = False [sssd] services = nss, pam, ssh, sudo config_file_version = 2 domains = ldap [nss] homedir_substring = /home INFO  tests.suites.clu.dsidm_config_test:dsidm_config_test.py:104 Check if config creation was successful INFO  tests.suites.clu.dsidm_config_test:dsidm_config_test.py:51 Check if content is present in output INFO  tests.suites.clu.dsidm_config_test:dsidm_config_test.py:56 Check if value is present in output INFO  tests.suites.clu.dsidm_config_test:dsidm_config_test.py:59 Reset log file for next test INFO  tests.suites.clu.dsidm_config_test:dsidm_config_test.py:107 Now we test allowed_group argument INFO  tests.suites.clu.dsidm_config_test:dsidm_config_test.py:108 Enable MemberOf plugin INFO  tests.suites.clu.dsidm_config_test:dsidm_config_test.py:113 Create test group INFO  tests.suites.clu.dsidm_config_test:dsidm_config_test.py:118 Create sssd.conf content with allowed group DEBUG  tests.suites.clu.dsidm_config_test:client_config.py:114 # # sssd.conf # Generated by 389 Directory Server - dsidm # # For more details see man sssd.conf and man sssd-ldap # Be sure to review the content of this file to ensure it is secure and correct # in your environment. 
[domain/ldap] # Uncomment this for more verbose logging. # debug_level=3 # Cache hashes of user authentication for offline auth. cache_credentials = True id_provider = ldap auth_provider = ldap access_provider = ldap chpass_provider = ldap ldap_schema = rfc2307bis ldap_search_base = dc=example,dc=com ldap_uri = ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:38901 # If you have DNS SRV records, you can use the following instead. This derives # from your ldap_search_base. # ldap_uri = _srv_ ldap_tls_reqcert = demand # To use cacert dir, place *.crt files in this path then run: # /usr/bin/openssl rehash /etc/openldap/certs # or (for older versions of openssl) # /usr/bin/c_rehash /etc/openldap/certs ldap_tls_cacertdir = /etc/openldap/certs # Path to the cacert # ldap_tls_cacert = /etc/openldap/certs/ca.crt # Only users who match this filter can login and authorise to this machine. Note # that users who do NOT match, will still have their uid/gid resolve, but they # can't login. ldap_access_filter = (memberOf=cn=new_group,ou=groups,dc=example,dc=com) enumerate = false access_provider = ldap ldap_user_member_of = memberof ldap_user_gecos = cn ldap_user_uuid = nsUniqueId ldap_group_uuid = nsUniqueId # This is really important as it allows SSSD to respect nsAccountLock ldap_account_expire_policy = rhds ldap_access_order = filter, expire # Setup for ssh keys # Inside /etc/ssh/sshd_config add the lines: # AuthorizedKeysCommand /usr/bin/sss_ssh_authorizedkeys # AuthorizedKeysCommandUser nobody # You can test with the command: sss_ssh_authorizedkeys <username> ldap_user_ssh_public_key = nsSshPublicKey # This prevents an issue where the Directory is recursively walked on group # and user look ups. It makes the client faster and more responsive in almost # every scenario. 
ignore_group_members = False [sssd] services = nss, pam, ssh, sudo config_file_version = 2 domains = ldap [nss] homedir_substring = /home INFO  tests.suites.clu.dsidm_config_test:dsidm_config_test.py:123 Check if config creation was successful INFO  tests.suites.clu.dsidm_config_test:dsidm_config_test.py:47 Check if content is present in output INFO  tests.suites.clu.dsidm_config_test:dsidm_config_test.py:59 Reset log file for next test
Passed suites/clu/dsidm_config_test.py::test_dsidm_config_ldap 0.00
------------------------------Captured stdout call------------------------------
# # OpenLDAP client configuration # Generated by 389 Directory Server - dsidm # # See ldap.conf(5) for details # This file should be world readable but not world writable. BASE dc=example,dc=com # Remember to check this: you can have multiple uris on this line. You may have # multiple servers or load balancers in your environment. URI ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:38901 # If you have DNS SRV records you can use: # URI ldaps:///dc%3Dexample%2Cdc%3Dcom DEREF never # To use cacert dir, place *.crt files in this path then run: # /usr/bin/openssl rehash /etc/openldap/certs # or (for older versions of openssl) # /usr/bin/c_rehash /etc/openldap/certs TLS_CACERTDIR /etc/openldap/certs # TLS_CACERT /etc/openldap/certs/ca.crt
-------------------------------Captured log call--------------------------------
INFO  tests.suites.clu.dsidm_config_test:dsidm_config_test.py:151 Create ldap.conf content DEBUG  tests.suites.clu.dsidm_config_test:client_config.py:155 # # OpenLDAP client configuration # Generated by 389 Directory Server - dsidm # # See ldap.conf(5) for details # This file should be world readable but not world writable. BASE dc=example,dc=com # Remember to check this: you can have multiple uris on this line. You may have # multiple servers or load balancers in your environment. URI ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:38901 # If you have DNS SRV records you can use: # URI ldaps:///dc%3Dexample%2Cdc%3Dcom DEREF never # To use cacert dir, place *.crt files in this path then run: # /usr/bin/openssl rehash /etc/openldap/certs # or (for older versions of openssl) # /usr/bin/c_rehash /etc/openldap/certs TLS_CACERTDIR /etc/openldap/certs # TLS_CACERT /etc/openldap/certs/ca.crt INFO  tests.suites.clu.dsidm_config_test:dsidm_config_test.py:154 Check if config creation was successful INFO  tests.suites.clu.dsidm_config_test:dsidm_config_test.py:51 Check if content is present in output INFO  tests.suites.clu.dsidm_config_test:dsidm_config_test.py:59 Reset log file for next test
Passed suites/clu/dsidm_config_test.py::test_dsidm_config_display 3.93
------------------------------Captured stdout call------------------------------
# This is a generic list of LDAP client configuration parameters you may require # for connecting a client to this server. Some of them may or may not apply # to your application, so consult your application documentation for further # assistance. # # This program makes a number of assumptions about your data and configuration # which may not be correct. Be sure to check these values for your situation. ; ldap uri ; This is the uri of the server you will connect to and authenticate to. It ; must be a valid subjectAltName in the presented TLS certificate. Note that this ; is not an exhaustive list of your LDAP servers, and other applications in your ; network like load balancers may affect this. This is just what we derive from ; your current connection. ldap_uri = ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:38901 ; ldap dns discovery uri ; In some environments, you may have DNS SRV records such as ; "_ldap._tcp.<domain name>". If these are present in your dns server, you can ; use the following uri. ldap_uri = ldaps:///dc%3Dexample%2Cdc%3Dcom ; ca_cert ; To correctly use TLS, you require the valid CA cert that issued your LDAP TLS ; certificates. Sometimes a copy of this may be in your server instance as ca_cert = /etc/dirsrv/slapd-<instance>/ca.crt ; However that's not guaranteed. You can show the certs from the LDAP server ; by sshing to the server and running: certutil -L -d /etc/dirsrv/slapd-<instance>/ ; If you can identify the CA certificate name, you can then view it with: certutil -L -n <ca cert name> -a -d /etc/dirsrv/slapd-<instance>/ ; This should be a pem file you can use in your application's CA. ; Some applications don't require a ca certificate parameter, and will use the ; ca certificate from /etc/openldap/ldap.conf. You should configure ldap.conf ; in these cases. See the 'client_config ldap.conf' command in dsidm. ; basedn ; The basedn is the root suffix where all searches will originate from for ; LDAP objects. 
basedn = dc=example,dc=com ; schema_type ; LDAP servers have different ways to structure their objects and group ; relationships. Legacy servers will use rfc2307, where as modern servers will ; use rfc2307bis (requires MemberOf plugin to be enabled). This is the schema ; setting of your directory based on your running configuration (if we can ; detect it). schema_type = rfc2307bis ; user/account basedn ; Some applications may optionally use a user/account basedn to limit searches ; in the directory. This can be for performance or security reasons. Generally ; you shouldn't need this, preferring to use groups and filters for access ; control. user_basedn = ou=people,dc=example,dc=com ; user filter ; This is an ldap filter that will return only user objects. Additionally some ; applications will template into the filter (similar to sql statements) or they ; will generate the filter based on attributes. We list a number of possible ; filters you might use, but you should customise this for your application. ; ; If you are using rfc2307bis, you can use this filter to provide authorisation ; support by adding filters such as: (memberOf=<groupdn>) user_filter = (&(objectclass=nsPerson)(objectclass=nsAccount)(objectclass=nsOrgPerson)(objectclass=posixAccount)) user_filter = (&(&(objectclass=nsPerson)(objectclass=nsAccount)(objectclass=nsOrgPerson)(objectclass=posixAccount))(|(uid=<PARAM>)(displayName=<PARAM>)(cn=<PARAM>))) ; group basedn ; Some applications may optionnaly use a group basedn to limit searches in the ; directory. This can be for performance or security reasons. Generally you ; shouldn't need this, preferring to use groups and filters for access control. group_basedn = ou=Groups,dc=example,dc=com ; group filter ; This is an ldap filter that will return only group objects. Additionally ; some applications will template into the filter (similar to sql statements) ; or they will generate the filter base on attributes. 
We list a number of ; possible filters you might use, but you should customise this for your ; application. group_filter = (&(objectclass=groupOfNames)) group_filter = (&(&(objectclass=groupOfNames))(|(cn=<PARAM>))) ; attribute mappings ; Due to the variety of schemas and attribute mappings in LDAP, there are ; different representations of attributes and values. This is a guess at ; the mappings that exist in your server, and what attributes you should ; configure and use. unique id = nsUniqueId user rdn = uid user identifier = uid group rdn = cn group member attribute = member # This is a generic list of LDAP client configuration parameters you may require # for connecting a client to this server. Some of them may or may not apply # to your application, so consult your application documentation for further # assistance. # # This program makes a number of assumptions about your data and configuration # which may not be correct. Be sure to check these values for your situation. ; ldap uri ; This is the uri of the server you will connect to and authenticate to. It ; must be a valid subjectAltName in the presented TLS certificate. Note that this ; is not an exhaustive list of your LDAP servers, and other applications in your ; network like load balancers may affect this. This is just what we derive from ; your current connection. ldap_uri = ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:38901 ; ldap dns discovery uri ; In some environments, you may have DNS SRV records such as ; "_ldap._tcp.<domain name>". If these are present in your dns server, you can ; use the following uri. ldap_uri = ldaps:///dc%3Dexample%2Cdc%3Dcom ; ca_cert ; To correctly use TLS, you require the valid CA cert that issued your LDAP TLS ; certificates. Sometimes a copy of this may be in your server instance as ca_cert = /etc/dirsrv/slapd-<instance>/ca.crt ; However that's not guaranteed. 
You can show the certs from the LDAP server ; by sshing to the server and running: certutil -L -d /etc/dirsrv/slapd-<instance>/ ; If you can identify the CA certificate name, you can then view it with: certutil -L -n <ca cert name> -a -d /etc/dirsrv/slapd-<instance>/ ; This should be a pem file you can use in your application's CA. ; Some applications don't require a ca certificate parameter, and will use the ; ca certificate from /etc/openldap/ldap.conf. You should configure ldap.conf ; in these cases. See the 'client_config ldap.conf' command in dsidm. ; basedn ; The basedn is the root suffix where all searches will originate from for ; LDAP objects. basedn = dc=example,dc=com ; schema_type ; LDAP servers have different ways to structure their objects and group ; relationships. Legacy servers will use rfc2307, where as modern servers will ; use rfc2307bis (requires MemberOf plugin to be enabled). This is the schema ; setting of your directory based on your running configuration (if we can ; detect it). schema_type = rfc2307bis ; user/account basedn ; Some applications may optionally use a user/account basedn to limit searches ; in the directory. This can be for performance or security reasons. Generally ; you shouldn't need this, preferring to use groups and filters for access ; control. user_basedn = ou=people,dc=example,dc=com ; user filter ; This is an ldap filter that will return only user objects. Additionally some ; applications will template into the filter (similar to sql statements) or they ; will generate the filter based on attributes. We list a number of possible ; filters you might use, but you should customise this for your application. 
; ; If you are using rfc2307bis, you can use this filter to provide authorisation ; support by adding filters such as: (memberOf=<groupdn>) user_filter = (&(objectclass=nsPerson)(objectclass=nsAccount)(objectclass=nsOrgPerson)(objectclass=posixAccount)) user_filter = (&(&(objectclass=nsPerson)(objectclass=nsAccount)(objectclass=nsOrgPerson)(objectclass=posixAccount))(|(uid=<PARAM>)(displayName=<PARAM>)(cn=<PARAM>))) ; group basedn ; Some applications may optionnaly use a group basedn to limit searches in the ; directory. This can be for performance or security reasons. Generally you ; shouldn't need this, preferring to use groups and filters for access control. group_basedn = ou=Groups,dc=example,dc=com ; group filter ; This is an ldap filter that will return only group objects. Additionally ; some applications will template into the filter (similar to sql statements) ; or they will generate the filter base on attributes. We list a number of ; possible filters you might use, but you should customise this for your ; application. group_filter = (&(objectclass=groupOfNames)) group_filter = (&(&(objectclass=groupOfNames))(|(cn=<PARAM>))) ; attribute mappings ; Due to the variety of schemas and attribute mappings in LDAP, there are ; different representations of attributes and values. This is a guess at ; the mappings that exist in your server, and what attributes you should ; configure and use. unique id = nsUniqueId user rdn = uid user identifier = uid group rdn = cn group member attribute = member
-------------------------------Captured log call--------------------------------
INFO  tests.suites.clu.dsidm_config_test:dsidm_config_test.py:195 Test dsidm display option DEBUG  tests.suites.clu.dsidm_config_test:client_config.py:290 # This is a generic list of LDAP client configuration parameters you may require # for connecting a client to this server. Some of them may or may not apply # to your application, so consult your application documentation for further # assistance. # # This program makes a number of assumptions about your data and configuration # which may not be correct. Be sure to check these values for your situation. ; ldap uri ; This is the uri of the server you will connect to and authenticate to. It ; must be a valid subjectAltName in the presented TLS certificate. Note that this ; is not an exhaustive list of your LDAP servers, and other applications in your ; network like load balancers may affect this. This is just what we derive from ; your current connection. ldap_uri = ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:38901 ; ldap dns discovery uri ; In some environments, you may have DNS SRV records such as ; "_ldap._tcp.<domain name>". If these are present in your dns server, you can ; use the following uri. ldap_uri = ldaps:///dc%3Dexample%2Cdc%3Dcom ; ca_cert ; To correctly use TLS, you require the valid CA cert that issued your LDAP TLS ; certificates. Sometimes a copy of this may be in your server instance as ca_cert = /etc/dirsrv/slapd-<instance>/ca.crt ; However that's not guaranteed. You can show the certs from the LDAP server ; by sshing to the server and running: certutil -L -d /etc/dirsrv/slapd-<instance>/ ; If you can identify the CA certificate name, you can then view it with: certutil -L -n <ca cert name> -a -d /etc/dirsrv/slapd-<instance>/ ; This should be a pem file you can use in your application's CA. ; Some applications don't require a ca certificate parameter, and will use the ; ca certificate from /etc/openldap/ldap.conf. You should configure ldap.conf ; in these cases. 
See the 'client_config ldap.conf' command in dsidm. ; basedn ; The basedn is the root suffix where all searches will originate from for ; LDAP objects. basedn = dc=example,dc=com ; schema_type ; LDAP servers have different ways to structure their objects and group ; relationships. Legacy servers will use rfc2307, where as modern servers will ; use rfc2307bis (requires MemberOf plugin to be enabled). This is the schema ; setting of your directory based on your running configuration (if we can ; detect it). schema_type = rfc2307bis ; user/account basedn ; Some applications may optionally use a user/account basedn to limit searches ; in the directory. This can be for performance or security reasons. Generally ; you shouldn't need this, preferring to use groups and filters for access ; control. user_basedn = ou=people,dc=example,dc=com ; user filter ; This is an ldap filter that will return only user objects. Additionally some ; applications will template into the filter (similar to sql statements) or they ; will generate the filter based on attributes. We list a number of possible ; filters you might use, but you should customise this for your application. ; ; If you are using rfc2307bis, you can use this filter to provide authorisation ; support by adding filters such as: (memberOf=<groupdn>) user_filter = (&(objectclass=nsPerson)(objectclass=nsAccount)(objectclass=nsOrgPerson)(objectclass=posixAccount)) user_filter = (&(&(objectclass=nsPerson)(objectclass=nsAccount)(objectclass=nsOrgPerson)(objectclass=posixAccount))(|(uid=<PARAM>)(displayName=<PARAM>)(cn=<PARAM>))) ; group basedn ; Some applications may optionnaly use a group basedn to limit searches in the ; directory. This can be for performance or security reasons. Generally you ; shouldn't need this, preferring to use groups and filters for access control. group_basedn = ou=Groups,dc=example,dc=com ; group filter ; This is an ldap filter that will return only group objects. 
Additionally ; some applications will template into the filter (similar to sql statements) ; or they will generate the filter base on attributes. We list a number of ; possible filters you might use, but you should customise this for your ; application. group_filter = (&(objectclass=groupOfNames)) group_filter = (&(&(objectclass=groupOfNames))(|(cn=<PARAM>))) ; attribute mappings ; Due to the variety of schemas and attribute mappings in LDAP, there are ; different representations of attributes and values. This is a guess at ; the mappings that exist in your server, and what attributes you should ; configure and use. unique id = nsUniqueId user rdn = uid user identifier = uid group rdn = cn group member attribute = member INFO  tests.suites.clu.dsidm_config_test:dsidm_config_test.py:198 Check if display option was successful INFO  tests.suites.clu.dsidm_config_test:dsidm_config_test.py:51 Check if content is present in output INFO  tests.suites.clu.dsidm_config_test:dsidm_config_test.py:56 Check if value is present in output INFO  tests.suites.clu.dsidm_config_test:dsidm_config_test.py:59 Reset log file for next test INFO  tests.suites.clu.dsidm_config_test:dsidm_config_test.py:201 Enable MemberOf plugin INFO  tests.suites.clu.dsidm_config_test:dsidm_config_test.py:206 Test dsidm display option with MemberOf plugin DEBUG  tests.suites.clu.dsidm_config_test:client_config.py:290 # This is a generic list of LDAP client configuration parameters you may require # for connecting a client to this server. Some of them may or may not apply # to your application, so consult your application documentation for further # assistance. # # This program makes a number of assumptions about your data and configuration # which may not be correct. Be sure to check these values for your situation. ; ldap uri ; This is the uri of the server you will connect to and authenticate to. It ; must be a valid subjectAltName in the presented TLS certificate. 
Note that this ; is not an exhaustive list of your LDAP servers, and other applications in your ; network like load balancers may affect this. This is just what we derive from ; your current connection. ldap_uri = ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:38901 ; ldap dns discovery uri ; In some environments, you may have DNS SRV records such as ; "_ldap._tcp.<domain name>". If these are present in your dns server, you can ; use the following uri. ldap_uri = ldaps:///dc%3Dexample%2Cdc%3Dcom ; ca_cert ; To correctly use TLS, you require the valid CA cert that issued your LDAP TLS ; certificates. Sometimes a copy of this may be in your server instance as ca_cert = /etc/dirsrv/slapd-<instance>/ca.crt ; However that's not guaranteed. You can show the certs from the LDAP server ; by sshing to the server and running: certutil -L -d /etc/dirsrv/slapd-<instance>/ ; If you can identify the CA certificate name, you can then view it with: certutil -L -n <ca cert name> -a -d /etc/dirsrv/slapd-<instance>/ ; This should be a pem file you can use in your application's CA. ; Some applications don't require a ca certificate parameter, and will use the ; ca certificate from /etc/openldap/ldap.conf. You should configure ldap.conf ; in these cases. See the 'client_config ldap.conf' command in dsidm. ; basedn ; The basedn is the root suffix where all searches will originate from for ; LDAP objects. basedn = dc=example,dc=com ; schema_type ; LDAP servers have different ways to structure their objects and group ; relationships. Legacy servers will use rfc2307, where as modern servers will ; use rfc2307bis (requires MemberOf plugin to be enabled). This is the schema ; setting of your directory based on your running configuration (if we can ; detect it). schema_type = rfc2307bis ; user/account basedn ; Some applications may optionally use a user/account basedn to limit searches ; in the directory. This can be for performance or security reasons. 
Generally ; you shouldn't need this, preferring to use groups and filters for access ; control. user_basedn = ou=people,dc=example,dc=com ; user filter ; This is an ldap filter that will return only user objects. Additionally some ; applications will template into the filter (similar to sql statements) or they ; will generate the filter based on attributes. We list a number of possible ; filters you might use, but you should customise this for your application. ; ; If you are using rfc2307bis, you can use this filter to provide authorisation ; support by adding filters such as: (memberOf=<groupdn>) user_filter = (&(objectclass=nsPerson)(objectclass=nsAccount)(objectclass=nsOrgPerson)(objectclass=posixAccount)) user_filter = (&(&(objectclass=nsPerson)(objectclass=nsAccount)(objectclass=nsOrgPerson)(objectclass=posixAccount))(|(uid=<PARAM>)(displayName=<PARAM>)(cn=<PARAM>))) ; group basedn ; Some applications may optionnaly use a group basedn to limit searches in the ; directory. This can be for performance or security reasons. Generally you ; shouldn't need this, preferring to use groups and filters for access control. group_basedn = ou=Groups,dc=example,dc=com ; group filter ; This is an ldap filter that will return only group objects. Additionally ; some applications will template into the filter (similar to sql statements) ; or they will generate the filter base on attributes. We list a number of ; possible filters you might use, but you should customise this for your ; application. group_filter = (&(objectclass=groupOfNames)) group_filter = (&(&(objectclass=groupOfNames))(|(cn=<PARAM>))) ; attribute mappings ; Due to the variety of schemas and attribute mappings in LDAP, there are ; different representations of attributes and values. This is a guess at ; the mappings that exist in your server, and what attributes you should ; configure and use. 
unique id = nsUniqueId user rdn = uid user identifier = uid group rdn = cn group member attribute = member INFO  tests.suites.clu.dsidm_config_test:dsidm_config_test.py:209 Check if display option was successful with MemberOf plugin enabled INFO  tests.suites.clu.dsidm_config_test:dsidm_config_test.py:51 Check if content is present in output INFO  tests.suites.clu.dsidm_config_test:dsidm_config_test.py:56 Check if value is present in output INFO  tests.suites.clu.dsidm_config_test:dsidm_config_test.py:59 Reset log file for next test
Passed suites/clu/fixup_test.py::test_posix_winsync_fixup 8.08
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.suites.clu.fixup_test:fixup_test.py:73 Enable POSIXWinsyncPlugin INFO  tests.suites.clu.fixup_test:fixup_test.py:77 Stopping the server and importing posix accounts INFO  tests.suites.clu.fixup_test:fixup_test.py:87 Run Fixup task INFO  tests.suites.clu.fixup_test:posix_winsync.py:29 Attempting to add task entry... INFO  tests.suites.clu.fixup_test:posix_winsync.py:39 Successfully added task entry INFO  tests.suites.clu.fixup_test:fixup_test.py:90 Check log if fixup task was successful
Passed suites/config/autotuning_test.py::test_threads_basic 0.02
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.suites.config.autotuning_test:autotuning_test.py:39 Set nsslapd-threadnumber: -1 to enable autotuning INFO  tests.suites.config.autotuning_test:autotuning_test.py:42 Assert nsslapd-threadnumber is equal to the documented expected value
Passed suites/config/autotuning_test.py::test_threads_warning 1.06
No log output captured.
Passed suites/config/autotuning_test.py::test_threads_invalid_value[-2] 0.17
-------------------------------Captured log call--------------------------------
INFO  tests.suites.config.autotuning_test:autotuning_test.py:87 Set nsslapd-threadnumber: -2. Operation should fail
Passed suites/config/autotuning_test.py::test_threads_invalid_value[0] 0.01
-------------------------------Captured log call--------------------------------
INFO  tests.suites.config.autotuning_test:autotuning_test.py:87 Set nsslapd-threadnumber: 0. Operation should fail
Passed suites/config/autotuning_test.py::test_threads_invalid_value[invalid] 0.01
-------------------------------Captured log call--------------------------------
INFO  tests.suites.config.autotuning_test:autotuning_test.py:87 Set nsslapd-threadnumber: invalid. Operation should fail
Passed suites/config/autotuning_test.py::test_threads_back_from_manual_value 0.06
-------------------------------Captured log call--------------------------------
INFO  tests.suites.config.autotuning_test:autotuning_test.py:109 Set nsslapd-threadnumber: -1 to enable autotuning and save the new value INFO  tests.suites.config.autotuning_test:autotuning_test.py:113 Set nsslapd-threadnumber to the autotuned value decreased by 2 INFO  tests.suites.config.autotuning_test:autotuning_test.py:118 Set nsslapd-threadnumber: -1 to enable autotuning INFO  tests.suites.config.autotuning_test:autotuning_test.py:121 Assert nsslapd-threadnumber is back to the autotuned value
Passed suites/config/autotuning_test.py::test_cache_autosize_non_zero[-] 3.70
-------------------------------Captured log call--------------------------------
INFO  tests.suites.config.autotuning_test:autotuning_test.py:169 Check nsslapd-dbcachesize and nsslapd-cachememsize before the test INFO  tests.suites.config.autotuning_test:autotuning_test.py:170 nsslapd-dbcachesize == b'408620441' INFO  tests.suites.config.autotuning_test:autotuning_test.py:171 nsslapd-cachememsize == b'1409286144' INFO  tests.suites.config.autotuning_test:autotuning_test.py:172 nsslapd-dncachememsize == b'201326592' INFO  tests.suites.config.autotuning_test:autotuning_test.py:173 nsslapd-cache-autosize == b'25' INFO  tests.suites.config.autotuning_test:autotuning_test.py:174 nsslapd-cache-autosize-split == b'25' INFO  tests.suites.config.autotuning_test:autotuning_test.py:180 Delete nsslapd-cache-autosize INFO  tests.suites.config.autotuning_test:autotuning_test.py:190 Delete nsslapd-cache-autosize-split INFO  tests.suites.config.autotuning_test:autotuning_test.py:196 Trying to set nsslapd-cachememsize to 33333333 INFO  tests.suites.config.autotuning_test:autotuning_test.py:199 Trying to set nsslapd-dbcachesize to 33333333 INFO  tests.suites.config.autotuning_test:autotuning_test.py:216 Check nsslapd-dbcachesize and nsslapd-cachememsize in the appropriate range. INFO  tests.suites.config.autotuning_test:autotuning_test.py:217 nsslapd-dbcachesize == b'408620441' INFO  tests.suites.config.autotuning_test:autotuning_test.py:218 nsslapd-cachememsize == b'1409286144' INFO  tests.suites.config.autotuning_test:autotuning_test.py:219 nsslapd-dncachememsize == b'201326592' INFO  tests.suites.config.autotuning_test:autotuning_test.py:220 nsslapd-cache-autosize == b'25' INFO  tests.suites.config.autotuning_test:autotuning_test.py:221 nsslapd-cache-autosize-split == b'25'
Passed suites/config/autotuning_test.py::test_cache_autosize_non_zero[-0] 4.48
-------------------------------Captured log call--------------------------------
INFO  tests.suites.config.autotuning_test:autotuning_test.py:169 Check nsslapd-dbcachesize and nsslapd-cachememsize before the test INFO  tests.suites.config.autotuning_test:autotuning_test.py:170 nsslapd-dbcachesize == b'408620441' INFO  tests.suites.config.autotuning_test:autotuning_test.py:171 nsslapd-cachememsize == b'1409286144' INFO  tests.suites.config.autotuning_test:autotuning_test.py:172 nsslapd-dncachememsize == b'201326592' INFO  tests.suites.config.autotuning_test:autotuning_test.py:173 nsslapd-cache-autosize == b'25' INFO  tests.suites.config.autotuning_test:autotuning_test.py:174 nsslapd-cache-autosize-split == b'25' INFO  tests.suites.config.autotuning_test:autotuning_test.py:180 Delete nsslapd-cache-autosize INFO  tests.suites.config.autotuning_test:autotuning_test.py:187 Set nsslapd-cache-autosize-split to 0 INFO  tests.suites.config.autotuning_test:autotuning_test.py:196 Trying to set nsslapd-cachememsize to 33333333 INFO  tests.suites.config.autotuning_test:autotuning_test.py:199 Trying to set nsslapd-dbcachesize to 33333333 INFO  tests.suites.config.autotuning_test:autotuning_test.py:216 Check nsslapd-dbcachesize and nsslapd-cachememsize in the appropriate range. INFO  tests.suites.config.autotuning_test:autotuning_test.py:217 nsslapd-dbcachesize == b'408620441' INFO  tests.suites.config.autotuning_test:autotuning_test.py:218 nsslapd-cachememsize == b'1409286144' INFO  tests.suites.config.autotuning_test:autotuning_test.py:219 nsslapd-dncachememsize == b'201326592' INFO  tests.suites.config.autotuning_test:autotuning_test.py:220 nsslapd-cache-autosize == b'25' INFO  tests.suites.config.autotuning_test:autotuning_test.py:221 nsslapd-cache-autosize-split == b'0'
Passed suites/config/autotuning_test.py::test_cache_autosize_non_zero[10-400] 4.14
-------------------------------Captured log call--------------------------------
INFO  tests.suites.config.autotuning_test:autotuning_test.py:169 Check nsslapd-dbcachesize and nsslapd-cachememsize before the test INFO  tests.suites.config.autotuning_test:autotuning_test.py:170 nsslapd-dbcachesize == b'408620441' INFO  tests.suites.config.autotuning_test:autotuning_test.py:171 nsslapd-cachememsize == b'1409286144' INFO  tests.suites.config.autotuning_test:autotuning_test.py:172 nsslapd-dncachememsize == b'201326592' INFO  tests.suites.config.autotuning_test:autotuning_test.py:173 nsslapd-cache-autosize == b'25' INFO  tests.suites.config.autotuning_test:autotuning_test.py:174 nsslapd-cache-autosize-split == b'0' INFO  tests.suites.config.autotuning_test:autotuning_test.py:177 Set nsslapd-cache-autosize to 10 INFO  tests.suites.config.autotuning_test:autotuning_test.py:187 Set nsslapd-cache-autosize-split to 40 INFO  tests.suites.config.autotuning_test:autotuning_test.py:196 Trying to set nsslapd-cachememsize to 33333333 INFO  tests.suites.config.autotuning_test:autotuning_test.py:199 Trying to set nsslapd-dbcachesize to 33333333 INFO  tests.suites.config.autotuning_test:autotuning_test.py:216 Check nsslapd-dbcachesize and nsslapd-cachememsize in the appropriate range. INFO  tests.suites.config.autotuning_test:autotuning_test.py:217 nsslapd-dbcachesize == b'261517082' INFO  tests.suites.config.autotuning_test:autotuning_test.py:218 nsslapd-cachememsize == b'469762048' INFO  tests.suites.config.autotuning_test:autotuning_test.py:219 nsslapd-dncachememsize == b'67108864' INFO  tests.suites.config.autotuning_test:autotuning_test.py:220 nsslapd-cache-autosize == b'10' INFO  tests.suites.config.autotuning_test:autotuning_test.py:221 nsslapd-cache-autosize-split == b'40'
Passed suites/config/autotuning_test.py::test_cache_autosize_non_zero[-40] 4.98
-------------------------------Captured log call--------------------------------
INFO  tests.suites.config.autotuning_test:autotuning_test.py:169 Check nsslapd-dbcachesize and nsslapd-cachememsize before the test INFO  tests.suites.config.autotuning_test:autotuning_test.py:170 nsslapd-dbcachesize == b'261517082' INFO  tests.suites.config.autotuning_test:autotuning_test.py:171 nsslapd-cachememsize == b'469762048' INFO  tests.suites.config.autotuning_test:autotuning_test.py:172 nsslapd-dncachememsize == b'67108864' INFO  tests.suites.config.autotuning_test:autotuning_test.py:173 nsslapd-cache-autosize == b'10' INFO  tests.suites.config.autotuning_test:autotuning_test.py:174 nsslapd-cache-autosize-split == b'40' INFO  tests.suites.config.autotuning_test:autotuning_test.py:180 Delete nsslapd-cache-autosize INFO  tests.suites.config.autotuning_test:autotuning_test.py:187 Set nsslapd-cache-autosize-split to 40 INFO  tests.suites.config.autotuning_test:autotuning_test.py:196 Trying to set nsslapd-cachememsize to 33333333 INFO  tests.suites.config.autotuning_test:autotuning_test.py:199 Trying to set nsslapd-dbcachesize to 33333333 INFO  tests.suites.config.autotuning_test:autotuning_test.py:216 Check nsslapd-dbcachesize and nsslapd-cachememsize in the appropriate range. INFO  tests.suites.config.autotuning_test:autotuning_test.py:217 nsslapd-dbcachesize == b'817240883' INFO  tests.suites.config.autotuning_test:autotuning_test.py:218 nsslapd-cachememsize == b'1140850688' INFO  tests.suites.config.autotuning_test:autotuning_test.py:219 nsslapd-dncachememsize == b'134217728' INFO  tests.suites.config.autotuning_test:autotuning_test.py:220 nsslapd-cache-autosize == b'25' INFO  tests.suites.config.autotuning_test:autotuning_test.py:221 nsslapd-cache-autosize-split == b'40'
Passed suites/config/autotuning_test.py::test_cache_autosize_non_zero[10-] 4.40
-------------------------------Captured log call--------------------------------
INFO  tests.suites.config.autotuning_test:autotuning_test.py:169 Check nsslapd-dbcachesize and nsslapd-cachememsize before the test INFO  tests.suites.config.autotuning_test:autotuning_test.py:170 nsslapd-dbcachesize == b'817240883' INFO  tests.suites.config.autotuning_test:autotuning_test.py:171 nsslapd-cachememsize == b'1140850688' INFO  tests.suites.config.autotuning_test:autotuning_test.py:172 nsslapd-dncachememsize == b'134217728' INFO  tests.suites.config.autotuning_test:autotuning_test.py:173 nsslapd-cache-autosize == b'25' INFO  tests.suites.config.autotuning_test:autotuning_test.py:174 nsslapd-cache-autosize-split == b'40' INFO  tests.suites.config.autotuning_test:autotuning_test.py:177 Set nsslapd-cache-autosize to 10 INFO  tests.suites.config.autotuning_test:autotuning_test.py:190 Delete nsslapd-cache-autosize-split INFO  tests.suites.config.autotuning_test:autotuning_test.py:196 Trying to set nsslapd-cachememsize to 33333333 INFO  tests.suites.config.autotuning_test:autotuning_test.py:199 Trying to set nsslapd-dbcachesize to 33333333 INFO  tests.suites.config.autotuning_test:autotuning_test.py:216 Check nsslapd-dbcachesize and nsslapd-cachememsize in the appropriate range. INFO  tests.suites.config.autotuning_test:autotuning_test.py:217 nsslapd-dbcachesize == b'163448176' INFO  tests.suites.config.autotuning_test:autotuning_test.py:218 nsslapd-cachememsize == b'603979776' INFO  tests.suites.config.autotuning_test:autotuning_test.py:219 nsslapd-dncachememsize == b'67108864' INFO  tests.suites.config.autotuning_test:autotuning_test.py:220 nsslapd-cache-autosize == b'10' INFO  tests.suites.config.autotuning_test:autotuning_test.py:221 nsslapd-cache-autosize-split == b'25'
Passed suites/config/autotuning_test.py::test_cache_autosize_non_zero[10-401] 4.56
-------------------------------Captured log call--------------------------------
INFO  tests.suites.config.autotuning_test:autotuning_test.py:169 Check nsslapd-dbcachesize and nsslapd-cachememsize before the test INFO  tests.suites.config.autotuning_test:autotuning_test.py:170 nsslapd-dbcachesize == b'163448176' INFO  tests.suites.config.autotuning_test:autotuning_test.py:171 nsslapd-cachememsize == b'603979776' INFO  tests.suites.config.autotuning_test:autotuning_test.py:172 nsslapd-dncachememsize == b'67108864' INFO  tests.suites.config.autotuning_test:autotuning_test.py:173 nsslapd-cache-autosize == b'10' INFO  tests.suites.config.autotuning_test:autotuning_test.py:174 nsslapd-cache-autosize-split == b'25' INFO  tests.suites.config.autotuning_test:autotuning_test.py:177 Set nsslapd-cache-autosize to 10 INFO  tests.suites.config.autotuning_test:autotuning_test.py:187 Set nsslapd-cache-autosize-split to 40 INFO  tests.suites.config.autotuning_test:autotuning_test.py:196 Trying to set nsslapd-cachememsize to 33333333 INFO  tests.suites.config.autotuning_test:autotuning_test.py:199 Trying to set nsslapd-dbcachesize to 33333333 INFO  tests.suites.config.autotuning_test:autotuning_test.py:216 Check nsslapd-dbcachesize and nsslapd-cachememsize in the appropriate range. INFO  tests.suites.config.autotuning_test:autotuning_test.py:217 nsslapd-dbcachesize == b'261517082' INFO  tests.suites.config.autotuning_test:autotuning_test.py:218 nsslapd-cachememsize == b'469762048' INFO  tests.suites.config.autotuning_test:autotuning_test.py:219 nsslapd-dncachememsize == b'67108864' INFO  tests.suites.config.autotuning_test:autotuning_test.py:220 nsslapd-cache-autosize == b'10' INFO  tests.suites.config.autotuning_test:autotuning_test.py:221 nsslapd-cache-autosize-split == b'40'
Passed suites/config/autotuning_test.py::test_cache_autosize_non_zero[10-0] 4.61
-------------------------------Captured log call--------------------------------
INFO  tests.suites.config.autotuning_test:autotuning_test.py:169 Check nsslapd-dbcachesize and nsslapd-cachememsize before the test INFO  tests.suites.config.autotuning_test:autotuning_test.py:170 nsslapd-dbcachesize == b'261517082' INFO  tests.suites.config.autotuning_test:autotuning_test.py:171 nsslapd-cachememsize == b'469762048' INFO  tests.suites.config.autotuning_test:autotuning_test.py:172 nsslapd-dncachememsize == b'67108864' INFO  tests.suites.config.autotuning_test:autotuning_test.py:173 nsslapd-cache-autosize == b'10' INFO  tests.suites.config.autotuning_test:autotuning_test.py:174 nsslapd-cache-autosize-split == b'40' INFO  tests.suites.config.autotuning_test:autotuning_test.py:177 Set nsslapd-cache-autosize to 10 INFO  tests.suites.config.autotuning_test:autotuning_test.py:187 Set nsslapd-cache-autosize-split to 0 INFO  tests.suites.config.autotuning_test:autotuning_test.py:196 Trying to set nsslapd-cachememsize to 33333333 INFO  tests.suites.config.autotuning_test:autotuning_test.py:199 Trying to set nsslapd-dbcachesize to 33333333 INFO  tests.suites.config.autotuning_test:autotuning_test.py:216 Check nsslapd-dbcachesize and nsslapd-cachememsize in the appropriate range. INFO  tests.suites.config.autotuning_test:autotuning_test.py:217 nsslapd-dbcachesize == b'163448176' INFO  tests.suites.config.autotuning_test:autotuning_test.py:218 nsslapd-cachememsize == b'603979776' INFO  tests.suites.config.autotuning_test:autotuning_test.py:219 nsslapd-dncachememsize == b'67108864' INFO  tests.suites.config.autotuning_test:autotuning_test.py:220 nsslapd-cache-autosize == b'10' INFO  tests.suites.config.autotuning_test:autotuning_test.py:221 nsslapd-cache-autosize-split == b'0'
Passed suites/config/autotuning_test.py::test_cache_autosize_basic_sane[0] 9.95
-------------------------------Captured log call--------------------------------
INFO  tests.suites.config.autotuning_test:autotuning_test.py:273 Check nsslapd-dbcachesize and nsslapd-cachememsize before the test INFO  tests.suites.config.autotuning_test:autotuning_test.py:274 nsslapd-dbcachesize == b'163448176' INFO  tests.suites.config.autotuning_test:autotuning_test.py:275 nsslapd-cachememsize == b'603979776' INFO  tests.suites.config.autotuning_test:autotuning_test.py:276 nsslapd-cache-autosize == b'0' INFO  tests.suites.config.autotuning_test:autotuning_test.py:277 nsslapd-cache-autosize-split == b'0' INFO  tests.suites.config.autotuning_test:autotuning_test.py:280 Set nsslapd-cache-autosize-split to 0 INFO  tests.suites.config.autotuning_test:autotuning_test.py:289 Set nsslapd-dbcachesize to 0 INFO  tests.suites.config.autotuning_test:autotuning_test.py:291 Set nsslapd-cachememsize to 0 INFO  tests.suites.config.autotuning_test:autotuning_test.py:307 Check nsslapd-dbcachesize and nsslapd-cachememsize in the appropriate range. INFO  tests.suites.config.autotuning_test:autotuning_test.py:308 nsslapd-dbcachesize == b'408620441' INFO  tests.suites.config.autotuning_test:autotuning_test.py:309 nsslapd-cachememsize == b'1409286144' INFO  tests.suites.config.autotuning_test:autotuning_test.py:310 nsslapd-dncachememsize == b'67108864' INFO  tests.suites.config.autotuning_test:autotuning_test.py:311 nsslapd-cache-autosize == b'0' INFO  tests.suites.config.autotuning_test:autotuning_test.py:312 nsslapd-cache-autosize-split == b'0' INFO  tests.suites.config.autotuning_test:autotuning_test.py:273 Check nsslapd-dbcachesize and nsslapd-cachememsize before the test INFO  tests.suites.config.autotuning_test:autotuning_test.py:274 nsslapd-dbcachesize == b'408620441' INFO  tests.suites.config.autotuning_test:autotuning_test.py:275 nsslapd-cachememsize == b'1409286144' INFO  tests.suites.config.autotuning_test:autotuning_test.py:276 nsslapd-cache-autosize == b'0' INFO  tests.suites.config.autotuning_test:autotuning_test.py:277 nsslapd-cache-autosize-split 
== b'0' INFO  tests.suites.config.autotuning_test:autotuning_test.py:280 Set nsslapd-cache-autosize-split to 0 INFO  tests.suites.config.autotuning_test:autotuning_test.py:289 Set nsslapd-dbcachesize to 33333333 INFO  tests.suites.config.autotuning_test:autotuning_test.py:291 Set nsslapd-cachememsize to 33333333 INFO  tests.suites.config.autotuning_test:autotuning_test.py:307 Check nsslapd-dbcachesize and nsslapd-cachememsize in the appropriate range. INFO  tests.suites.config.autotuning_test:autotuning_test.py:308 nsslapd-dbcachesize == b'33333333' INFO  tests.suites.config.autotuning_test:autotuning_test.py:309 nsslapd-cachememsize == b'33333333' INFO  tests.suites.config.autotuning_test:autotuning_test.py:310 nsslapd-dncachememsize == b'67108864' INFO  tests.suites.config.autotuning_test:autotuning_test.py:311 nsslapd-cache-autosize == b'0' INFO  tests.suites.config.autotuning_test:autotuning_test.py:312 nsslapd-cache-autosize-split == b'0'
Passed suites/config/autotuning_test.py::test_cache_autosize_basic_sane[] 8.88
-------------------------------Captured log call--------------------------------
INFO  tests.suites.config.autotuning_test:autotuning_test.py:273 Check nsslapd-dbcachesize and nsslapd-cachememsize before the test INFO  tests.suites.config.autotuning_test:autotuning_test.py:274 nsslapd-dbcachesize == b'33333333' INFO  tests.suites.config.autotuning_test:autotuning_test.py:275 nsslapd-cachememsize == b'33333333' INFO  tests.suites.config.autotuning_test:autotuning_test.py:276 nsslapd-cache-autosize == b'0' INFO  tests.suites.config.autotuning_test:autotuning_test.py:277 nsslapd-cache-autosize-split == b'0' INFO  tests.suites.config.autotuning_test:autotuning_test.py:283 Delete nsslapd-cache-autosize-split INFO  tests.suites.config.autotuning_test:autotuning_test.py:289 Set nsslapd-dbcachesize to 0 INFO  tests.suites.config.autotuning_test:autotuning_test.py:291 Set nsslapd-cachememsize to 0 INFO  tests.suites.config.autotuning_test:autotuning_test.py:307 Check nsslapd-dbcachesize and nsslapd-cachememsize in the appropriate range. INFO  tests.suites.config.autotuning_test:autotuning_test.py:308 nsslapd-dbcachesize == b'408620441' INFO  tests.suites.config.autotuning_test:autotuning_test.py:309 nsslapd-cachememsize == b'1409286144' INFO  tests.suites.config.autotuning_test:autotuning_test.py:310 nsslapd-dncachememsize == b'67108864' INFO  tests.suites.config.autotuning_test:autotuning_test.py:311 nsslapd-cache-autosize == b'0' INFO  tests.suites.config.autotuning_test:autotuning_test.py:312 nsslapd-cache-autosize-split == b'25' INFO  tests.suites.config.autotuning_test:autotuning_test.py:273 Check nsslapd-dbcachesize and nsslapd-cachememsize before the test INFO  tests.suites.config.autotuning_test:autotuning_test.py:274 nsslapd-dbcachesize == b'408620441' INFO  tests.suites.config.autotuning_test:autotuning_test.py:275 nsslapd-cachememsize == b'1409286144' INFO  tests.suites.config.autotuning_test:autotuning_test.py:276 nsslapd-cache-autosize == b'0' INFO  tests.suites.config.autotuning_test:autotuning_test.py:277 nsslapd-cache-autosize-split == 
b'25' INFO  tests.suites.config.autotuning_test:autotuning_test.py:283 Delete nsslapd-cache-autosize-split INFO  tests.suites.config.autotuning_test:autotuning_test.py:289 Set nsslapd-dbcachesize to 33333333 INFO  tests.suites.config.autotuning_test:autotuning_test.py:291 Set nsslapd-cachememsize to 33333333 INFO  tests.suites.config.autotuning_test:autotuning_test.py:307 Check nsslapd-dbcachesize and nsslapd-cachememsize in the appropriate range. INFO  tests.suites.config.autotuning_test:autotuning_test.py:308 nsslapd-dbcachesize == b'33333333' INFO  tests.suites.config.autotuning_test:autotuning_test.py:309 nsslapd-cachememsize == b'33333333' INFO  tests.suites.config.autotuning_test:autotuning_test.py:310 nsslapd-dncachememsize == b'67108864' INFO  tests.suites.config.autotuning_test:autotuning_test.py:311 nsslapd-cache-autosize == b'0' INFO  tests.suites.config.autotuning_test:autotuning_test.py:312 nsslapd-cache-autosize-split == b'25'
Passed suites/config/autotuning_test.py::test_cache_autosize_basic_sane[40] 9.46
-------------------------------Captured log call--------------------------------
INFO  tests.suites.config.autotuning_test:autotuning_test.py:273 Check nsslapd-dbcachesize and nsslapd-cachememsize before the test INFO  tests.suites.config.autotuning_test:autotuning_test.py:274 nsslapd-dbcachesize == b'33333333' INFO  tests.suites.config.autotuning_test:autotuning_test.py:275 nsslapd-cachememsize == b'33333333' INFO  tests.suites.config.autotuning_test:autotuning_test.py:276 nsslapd-cache-autosize == b'0' INFO  tests.suites.config.autotuning_test:autotuning_test.py:277 nsslapd-cache-autosize-split == b'25' INFO  tests.suites.config.autotuning_test:autotuning_test.py:280 Set nsslapd-cache-autosize-split to 40 INFO  tests.suites.config.autotuning_test:autotuning_test.py:289 Set nsslapd-dbcachesize to 0 INFO  tests.suites.config.autotuning_test:autotuning_test.py:291 Set nsslapd-cachememsize to 0 INFO  tests.suites.config.autotuning_test:autotuning_test.py:307 Check nsslapd-dbcachesize and nsslapd-cachememsize in the appropriate range. INFO  tests.suites.config.autotuning_test:autotuning_test.py:308 nsslapd-dbcachesize == b'817240883' INFO  tests.suites.config.autotuning_test:autotuning_test.py:309 nsslapd-cachememsize == b'1140850688' INFO  tests.suites.config.autotuning_test:autotuning_test.py:310 nsslapd-dncachememsize == b'67108864' INFO  tests.suites.config.autotuning_test:autotuning_test.py:311 nsslapd-cache-autosize == b'0' INFO  tests.suites.config.autotuning_test:autotuning_test.py:312 nsslapd-cache-autosize-split == b'40' INFO  tests.suites.config.autotuning_test:autotuning_test.py:273 Check nsslapd-dbcachesize and nsslapd-cachememsize before the test INFO  tests.suites.config.autotuning_test:autotuning_test.py:274 nsslapd-dbcachesize == b'817240883' INFO  tests.suites.config.autotuning_test:autotuning_test.py:275 nsslapd-cachememsize == b'1140850688' INFO  tests.suites.config.autotuning_test:autotuning_test.py:276 nsslapd-cache-autosize == b'0' INFO  tests.suites.config.autotuning_test:autotuning_test.py:277 nsslapd-cache-autosize-split 
== b'40' INFO  tests.suites.config.autotuning_test:autotuning_test.py:280 Set nsslapd-cache-autosize-split to 40 INFO  tests.suites.config.autotuning_test:autotuning_test.py:289 Set nsslapd-dbcachesize to 33333333 INFO  tests.suites.config.autotuning_test:autotuning_test.py:291 Set nsslapd-cachememsize to 33333333 INFO  tests.suites.config.autotuning_test:autotuning_test.py:307 Check nsslapd-dbcachesize and nsslapd-cachememsize in the appropriate range. INFO  tests.suites.config.autotuning_test:autotuning_test.py:308 nsslapd-dbcachesize == b'33333333' INFO  tests.suites.config.autotuning_test:autotuning_test.py:309 nsslapd-cachememsize == b'33333333' INFO  tests.suites.config.autotuning_test:autotuning_test.py:310 nsslapd-dncachememsize == b'67108864' INFO  tests.suites.config.autotuning_test:autotuning_test.py:311 nsslapd-cache-autosize == b'0' INFO  tests.suites.config.autotuning_test:autotuning_test.py:312 nsslapd-cache-autosize-split == b'40'
Passed suites/config/autotuning_test.py::test_cache_autosize_invalid_values[-2] 0.03
-------------------------------Captured log call--------------------------------
INFO  tests.suites.config.autotuning_test:autotuning_test.py:346 Set nsslapd-cache-autosize-split to -2 INFO  tests.suites.config.autotuning_test:autotuning_test.py:352 Set nsslapd-cache-autosize to -2
Passed suites/config/autotuning_test.py::test_cache_autosize_invalid_values[102] 0.04
-------------------------------Captured log call--------------------------------
INFO  tests.suites.config.autotuning_test:autotuning_test.py:346 Set nsslapd-cache-autosize-split to 102 INFO  tests.suites.config.autotuning_test:autotuning_test.py:352 Set nsslapd-cache-autosize to 102
Passed suites/config/autotuning_test.py::test_cache_autosize_invalid_values[invalid] 0.05
-------------------------------Captured log call--------------------------------
INFO  tests.suites.config.autotuning_test:autotuning_test.py:346 Set nsslapd-cache-autosize-split to invalid INFO  tests.suites.config.autotuning_test:autotuning_test.py:352 Set nsslapd-cache-autosize to invalid
Passed suites/config/config_test.py::test_maxbersize_repl 16.49
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect f6925c14-a086-4d92-90fe-cd7391a48c11 / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working 
(expect e7810810-16e8-4526-86fb-cd906f345eaa / got description=f6925c14-a086-4d92-90fe-cd7391a48c11) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists
-------------------------------Captured log call--------------------------------
INFO  tests.suites.config.config_test:config_test.py:69 Set nsslapd-maxbersize: 20K to master2 INFO  tests.suites.config.config_test:config_test.py:74 Try to add attribute with a big value to master2 - expect to FAIL INFO  tests.suites.config.config_test:config_test.py:81 Try to add attribute with a big value to master1 - expect to PASS INFO  tests.suites.config.config_test:config_test.py:86 Check if a big value was successfully added to master1 INFO  tests.suites.config.config_test:config_test.py:90 Check if a big value was successfully replicated to master2
Passed suites/config/config_test.py::test_config_listen_backport_size 0.03
No log output captured.
Passed suites/config/config_test.py::test_config_deadlock_policy 0.08
No log output captured.
Passed suites/config/config_test.py::test_defaultnamingcontext 0.98
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.suites.config.config_test:config_test.py:220 Check the attribute nsslapd-defaultnamingcontext is present in cn=config INFO  tests.suites.config.config_test:config_test.py:223 Delete nsslapd-defaultnamingcontext attribute INFO  tests.suites.config.config_test:config_test.py:230 modify nsslapd-defaultnamingcontext with new suffix INFO  tests.suites.config.config_test:config_test.py:233 Add new invalid value at runtime to nsslapd-defaultnamingcontext INFO  tests.suites.config.config_test:config_test.py:237 Modify nsslapd-defaultnamingcontext with blank value INFO  tests.suites.config.config_test:config_test.py:240 Add new suffix when nsslapd-defaultnamingcontext is empty INFO  tests.suites.config.config_test:config_test.py:244 Check the value of the nsslapd-defaultnamingcontext automatically have the new suffix INFO  tests.suites.config.config_test:config_test.py:247 Adding new suffix when nsslapd-defaultnamingcontext is not empty INFO  tests.suites.config.config_test:config_test.py:251 Check the value of the nsslapd-defaultnamingcontext has not changed INFO  tests.suites.config.config_test:config_test.py:254 Remove the newly added suffix and check the values of the attribute is not changed INFO  tests.suites.config.config_test:config_test.py:258 Remove all the suffix at the end
Passed suites/config/config_test.py::test_allow_add_delete_config_attributes 3.41
-------------------------------Captured log call--------------------------------
INFO  tests.suites.config.config_test:config_test.py:308 Add a new valid attribute at runtime to cn=config INFO  tests.suites.config.config_test:config_test.py:312 Delete nsslapd-listenhost to restore the default value INFO  tests.suites.config.config_test:config_test.py:317 Add new invalid attribute at runtime to cn=config INFO  tests.suites.config.config_test:config_test.py:321 Make sure the invalid attribute is not added
Passed suites/config/config_test.py::test_ignore_virtual_attrs 0.09
-------------------------------Captured log call--------------------------------
INFO  tests.suites.config.config_test:config_test.py:354 Check the attribute nsslapd-ignore-virtual-attrs is present in cn=config INFO  tests.suites.config.config_test:config_test.py:357 Check the default value of attribute nsslapd-ignore-virtual-attrs should be OFF INFO  tests.suites.config.config_test:config_test.py:360 Set the valid values i.e. on/ON and off/OFF for nsslapd-ignore-virtual-attrs INFO  tests.suites.config.config_test:config_test.py:365 Set invalid value for attribute nsslapd-ignore-virtual-attrs INFO  tests.suites.config.config_test:config_test.py:376 Add cosPointer, cosTemplate and test entry to default suffix, where virtual attribute is postal code INFO  tests.suites.config.config_test:config_test.py:389 Test if virtual attribute i.e. postal code shown in test entry while nsslapd-ignore-virtual-attrs: off INFO  tests.suites.config.config_test:config_test.py:392 Set nsslapd-ignore-virtual-attrs=on INFO  tests.suites.config.config_test:config_test.py:395 Test if virtual attribute i.e. postal code not shown while nsslapd-ignore-virtual-attrs: on
Passed suites/config/config_test.py::test_ndn_cache_enabled 9.17
-------------------------------Captured log call--------------------------------
INFO  tests.suites.config.config_test:config_test.py:423 Check the attribute nsslapd-ndn-cache-enabled is present in cn=config INFO  tests.suites.config.config_test:config_test.py:426 Check the attribute nsslapd-ndn-cache-enabled has the default value set as ON INFO  tests.suites.config.config_test:config_test.py:429 Check the attribute nsslapd-ndn-cache-max-size is present in cn=config INFO  tests.suites.config.config_test:config_test.py:435 Ticket#49593 : NDN cache stats should be under the global stats - Implemented in 1.4 INFO  tests.suites.config.config_test:config_test.py:436 Fetch the monitor value according to the ds version INFO  tests.suites.config.config_test:config_test.py:442 Check the backend monitor output for Normalized DN cache statistics, while nsslapd-ndn-cache-enabled is off INFO  tests.suites.config.config_test:config_test.py:448 Check the backend monitor output for Normalized DN cache statistics, while nsslapd-ndn-cache-enabled is on INFO  tests.suites.config.config_test:config_test.py:454 Set invalid value for nsslapd-ndn-cache-enabled INFO  tests.suites.config.config_test:config_test.py:458 Set invalid value for nsslapd-ndn-cache-max-size
Passed suites/config/config_test.py::test_require_index 1.94
No log output captured.
Passed suites/config/config_test.py::test_require_internal_index 5.45
No log output captured.
Passed suites/config/regression_test.py::test_maxbersize_repl 7.17
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.suites.config.regression_test:regression_test.py:100 Set nsslapd-errorlog-maxlogsize before nsslapd-errorlog-logmaxdiskspace INFO  tests.suites.config.regression_test:regression_test.py:104 Assert no init_dse_file errors in the error log INFO  tests.suites.config.regression_test:regression_test.py:108 Set nsslapd-errorlog-maxlogsize after nsslapd-errorlog-logmaxdiskspace INFO  tests.suites.config.regression_test:regression_test.py:112 Assert no init_dse_file errors in the error log
Passed suites/config/removed_config_49298_test.py::test_restore_config 3.49
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.suites.config.removed_config_49298_test:removed_config_49298_test.py:43 /etc/dirsrv/slapd-standalone1
Passed suites/config/removed_config_49298_test.py::test_removed_config 2.46
-------------------------------Captured log call--------------------------------
INFO  tests.suites.config.removed_config_49298_test:removed_config_49298_test.py:72 /etc/dirsrv/slapd-standalone1
Passed suites/cos/cos_test.py::test_positive 0.71
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/cos/indirect_cos_test.py::test_indirect_cos 1.46
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. INFO  tests.suites.cos.indirect_cos_test:indirect_cos_test.py:113 Add custom schema... INFO  tests.suites.cos.indirect_cos_test:indirect_cos_test.py:126 Add test user... INFO  tests.suites.cos.indirect_cos_test:indirect_cos_test.py:143 Setup indirect COS...
------------------------------Captured stdout call------------------------------
Successfully created subtree password policy
-------------------------------Captured log call--------------------------------
INFO  tests.suites.cos.indirect_cos_test:indirect_cos_test.py:163 Checking user... INFO  tests.suites.cos.indirect_cos_test:indirect_cos_test.py:60 Create password policy for subtree ou=people,dc=example,dc=com INFO  tests.suites.cos.indirect_cos_test:indirect_cos_test.py:170 Checking user...
Passed suites/disk_monitoring/disk_monitoring_test.py::test_verify_operation_when_disk_monitoring_is_off 4.52
-----------------------------Captured stdout setup------------------------------
Relabeled /var/log/dirsrv/slapd-standalone1 from unconfined_u:object_r:user_tmp_t:s0 to system_u:object_r:dirsrv_var_log_t:s0
-----------------------------Captured stderr setup------------------------------
chown: cannot access '/var/log/dirsrv/slapd-standalone1/*': No such file or directory
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
------------------------------Captured stderr call------------------------------
25+0 records in 25+0 records out 26214400 bytes (26 MB, 25 MiB) copied, 0.0142791 s, 1.8 GB/s dd: error writing '/var/log/dirsrv/slapd-standalone1/foo1': No space left on device 10+0 records in 9+0 records out 10465280 bytes (10 MB, 10 MiB) copied, 0.00557046 s, 1.9 GB/s
Passed suites/disk_monitoring/disk_monitoring_test.py::test_free_up_the_disk_space_and_change_ds_config 4.42
No log output captured.
Passed suites/disk_monitoring/disk_monitoring_test.py::test_verify_operation_with_nsslapd_disk_monitoring_logging_critical_off 34.66
------------------------------Captured stderr call------------------------------
10+0 records in 10+0 records out 10485760 bytes (10 MB, 10 MiB) copied, 0.00532054 s, 2.0 GB/s
Passed suites/disk_monitoring/disk_monitoring_test.py::test_operation_with_nsslapd_disk_monitoring_logging_critical_on_below_half_of_the_threshold 25.44
------------------------------Captured stderr call------------------------------
31+0 records in 31+0 records out 32505856 bytes (33 MB, 31 MiB) copied, 0.0139643 s, 2.3 GB/s
Passed suites/disk_monitoring/disk_monitoring_test.py::test_setting_nsslapd_disk_monitoring_logging_critical_to_off 3.51
No log output captured.
Passed suites/disk_monitoring/disk_monitoring_test.py::test_operation_with_nsslapd_disk_monitoring_logging_critical_off 70.17
------------------------------Captured stderr call------------------------------
10+0 records in 10+0 records out 10485760 bytes (10 MB, 10 MiB) copied, 0.0129337 s, 811 MB/s
Passed suites/disk_monitoring/disk_monitoring_test.py::test_operation_with_nsslapd_disk_monitoring_logging_critical_off_below_half_of_the_threshold 157.69
------------------------------Captured stderr call------------------------------
30+0 records in 30+0 records out 31457280 bytes (31 MB, 30 MiB) copied, 0.0244906 s, 1.3 GB/s
Passed suites/disk_monitoring/disk_monitoring_test.py::test_go_straight_below_half_of_the_threshold 104.87
------------------------------Captured stderr call------------------------------
31+0 records in 31+0 records out 32505856 bytes (33 MB, 31 MiB) copied, 0.0348335 s, 933 MB/s
Passed suites/disk_monitoring/disk_monitoring_test.py::test_readonly_on_threshold 28.17
------------------------------Captured stderr call------------------------------
10+0 records in 10+0 records out 10485760 bytes (10 MB, 10 MiB) copied, 0.0145985 s, 718 MB/s
Passed suites/disk_monitoring/disk_monitoring_test.py::test_readonly_on_threshold_below_half_of_the_threshold 49.86
------------------------------Captured stderr call------------------------------
31+0 records in 31+0 records out 32505856 bytes (33 MB, 31 MiB) copied, 0.0140996 s, 2.3 GB/s
Passed suites/disk_monitoring/disk_monitoring_test.py::test_below_half_of_the_threshold_not_starting_after_shutdown 112.13
------------------------------Captured stderr call------------------------------
31+0 records in 31+0 records out 32505856 bytes (33 MB, 31 MiB) copied, 0.0162828 s, 2.0 GB/s
-------------------------------Captured log call--------------------------------
INFO  lib389:disk_monitoring_test.py:582 Instance start up has failed as expected
Passed suites/disk_monitoring/disk_monitoring_test.py::test_go_straight_below_4kb 18.25
------------------------------Captured stderr call------------------------------
25+0 records in 25+0 records out 26214400 bytes (26 MB, 25 MiB) copied, 0.0114747 s, 2.3 GB/s dd: error writing '/var/log/dirsrv/slapd-standalone1/foo1': No space left on device 10+0 records in 9+0 records out 10174464 bytes (10 MB, 9.7 MiB) copied, 0.0052121 s, 2.0 GB/s
Passed suites/disk_monitoring/disk_monitoring_test.py::test_threshold_to_overflow_value 0.03
No log output captured.
Passed suites/disk_monitoring/disk_monitoring_test.py::test_threshold_is_reached_to_half 14.48
------------------------------Captured stderr call------------------------------
10+0 records in 10+0 records out 10485760 bytes (10 MB, 10 MiB) copied, 0.00591322 s, 1.8 GB/s
Passed suites/disk_monitoring/disk_monitoring_test.py::test_negagtive_parameterize[nsslapd-disk-monitoring-threshold--2] 0.00
No log output captured.
Passed suites/disk_monitoring/disk_monitoring_test.py::test_negagtive_parameterize[nsslapd-disk-monitoring-threshold-9223372036854775808] 0.00
No log output captured.
Passed suites/disk_monitoring/disk_monitoring_test.py::test_negagtive_parameterize[nsslapd-disk-monitoring-threshold-2047] 0.00
No log output captured.
Passed suites/disk_monitoring/disk_monitoring_test.py::test_negagtive_parameterize[nsslapd-disk-monitoring-threshold-0] 0.00
No log output captured.
Passed suites/disk_monitoring/disk_monitoring_test.py::test_negagtive_parameterize[nsslapd-disk-monitoring-threshold--1294967296] 0.00
No log output captured.
Passed suites/disk_monitoring/disk_monitoring_test.py::test_negagtive_parameterize[nsslapd-disk-monitoring-threshold-invalid] 0.00
No log output captured.
Passed suites/disk_monitoring/disk_monitoring_test.py::test_negagtive_parameterize[nsslapd-disk-monitoring-invalid] 0.00
No log output captured.
Passed suites/disk_monitoring/disk_monitoring_test.py::test_negagtive_parameterize[nsslapd-disk-monitoring-1] 0.00
No log output captured.
Passed suites/disk_monitoring/disk_monitoring_test.py::test_negagtive_parameterize[nsslapd-disk-monitoring-grace-period-00] 0.00
No log output captured.
Passed suites/disk_monitoring/disk_monitoring_test.py::test_negagtive_parameterize[nsslapd-disk-monitoring-grace-period-525 948] 0.00
No log output captured.
Passed suites/disk_monitoring/disk_monitoring_test.py::test_negagtive_parameterize[nsslapd-disk-monitoring-grace-period--10] 0.00
No log output captured.
Passed suites/disk_monitoring/disk_monitoring_test.py::test_negagtive_parameterize[nsslapd-disk-monitoring-logging-critical-oninvalid] 0.00
No log output captured.
Passed suites/disk_monitoring/disk_monitoring_test.py::test_negagtive_parameterize[nsslapd-disk-monitoring-grace-period--11] 0.00
No log output captured.
Passed suites/disk_monitoring/disk_monitoring_test.py::test_negagtive_parameterize[nsslapd-disk-monitoring-grace-period-01] 0.00
No log output captured.
Passed suites/disk_monitoring/disk_monitoring_test.py::test_valid_operations_are_permitted 3.91
No log output captured.
Passed suites/disk_monitoring/disk_space_test.py::test_basic 0.00
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  lib389:disk_space_test.py:37 Check that "partition", "size", "used", "available", "use%" words are present in the string INFO  lib389:disk_space_test.py:41 Check that the sizes are numbers
Passed suites/ds_logs/ds_logs_test.py::test_check_default 0.00
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
DEBUG  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:232 on
Passed suites/ds_logs/ds_logs_test.py::test_plugin_set_invalid 0.16
-------------------------------Captured log call--------------------------------
INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:251 test_plugin_set_invalid - Expect to fail with junk value
Passed suites/ds_logs/ds_logs_test.py::test_log_plugin_on 4.15
-------------------------------Captured log call--------------------------------
INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:278 Bug 1273549 - Check access logs for millisecond, when attribute is ON INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:279 perform any ldap operation, which will trigger the logs INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:35 Adding 10 users INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:283 Restart the server to flush the logs INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:286 parse the access logs
Passed suites/ds_logs/ds_logs_test.py::test_log_plugin_off 12.51
-------------------------------Captured log call--------------------------------
INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:318 Bug 1273549 - Check access logs for missing millisecond, when attribute is OFF INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:320 test_log_plugin_off - set the configuration attribute to OFF INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:323 Restart the server to flush the logs INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:326 test_log_plugin_off - delete the previous access logs INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:35 Adding 10 users INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:333 Restart the server to flush the logs INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:336 check access log that microseconds are not present
Passed suites/ds_logs/ds_logs_test.py::test_internal_log_server_level_0 4.01
-------------------------------Captured log setup-------------------------------
INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:199 Disable access log buffering
-------------------------------Captured log call--------------------------------
INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:364 Set nsslapd-plugin-logging to on INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:367 Configure access log level to 0 INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:371 Restart the server to flush the logs INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:375 Check if access log does not contain internal log of MOD operation INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:381 Check if the other internal operations are not present
Passed suites/ds_logs/ds_logs_test.py::test_internal_log_server_level_4 6.13
-------------------------------Captured log setup-------------------------------
INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:199 Disable access log buffering
-------------------------------Captured log call--------------------------------
INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:413 Set nsslapd-plugin-logging to on INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:416 Configure access log level to 4 INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:420 Restart the server to flush the logs INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:425 Check if access log contains internal MOD operation in correct format INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:431 Check if the other internal operations have the correct format
Passed suites/ds_logs/ds_logs_test.py::test_internal_log_level_260 6.48
-------------------------------Captured log setup-------------------------------
INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:101 Enable automember plugin INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:105 Enable Referential Integrity plugin INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:109 Set nsslapd-plugin-logging to on INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:112 Restart the server INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:119 Configure access log level to 260 INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:85 Renaming user INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:88 Delete the user INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:91 Delete automember entry, org. unit and group for the next test INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:199 Disable access log buffering
-------------------------------Captured log call--------------------------------
INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:471 Restart the server to flush the logs INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:475 Check the access logs for ADD operation of the user INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:491 Check the access logs for MOD operation of the user INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:505 Check the access logs for DEL operation of the user INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:517 Check if the other internal operations have the correct format
Passed suites/ds_logs/ds_logs_test.py::test_internal_log_level_131076 6.52
-------------------------------Captured log setup-------------------------------
INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:119 Configure access log level to 131076 INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:85 Renaming user INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:88 Delete the user INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:91 Delete automember entry, org. unit and group for the next test INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:199 Disable access log buffering
-------------------------------Captured log call--------------------------------
INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:556 Restart the server to flush the logs INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:560 Check the access logs for ADD operation of the user INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:576 Check the access logs for MOD operation of the user INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:590 Check the access logs for DEL operation of the user INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:602 Check if the other internal operations have the correct format
Passed suites/ds_logs/ds_logs_test.py::test_internal_log_level_516 6.46
-------------------------------Captured log setup-------------------------------
INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:119 Configure access log level to 516 INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:85 Renaming user INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:88 Delete the user INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:91 Delete automember entry, org. unit and group for the next test INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:199 Disable access log buffering
-------------------------------Captured log call--------------------------------
INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:641 Restart the server to flush the logs INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:645 Check the access logs for ADD operation of the user INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:662 Check the access logs for MOD operation of the user INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:679 Check the access logs for DEL operation of the user INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:694 Check if the other internal operations have the correct format
Passed suites/ds_logs/ds_logs_test.py::test_access_log_truncated_search_message 4.55
-------------------------------Captured log call--------------------------------
INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:721 Make a search INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:724 Restart the server to flush the logs
Passed suites/ds_logs/ds_logs_test.py::test_etime_order_of_magnitude 2.75
-------------------------------Captured log setup-------------------------------
INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:199 Disable access log buffering
-------------------------------Captured log call--------------------------------
INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:810 add_users INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:35 Adding 30 users INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:813 search users INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:816 parse the access logs to get the SRCH string INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:824 get the operation start time from the SRCH string INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:828 get the OP number from the SRCH string INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:832 get the RESULT string matching the SRCH OP number INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:840 get the operation end time from the RESULT string INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:844 get the logged etime for the operation from the RESULT string INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:848 Calculate the ratio between logged etime for the operation and elapsed time from its start time to its end time - should be around 1
Passed suites/ds_logs/ds_logs_test.py::test_log_base_dn_when_invalid_attr_request 3.54
-------------------------------Captured log setup-------------------------------
INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:199 Disable access log buffering
-------------------------------Captured log call--------------------------------
INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:880 delete the previous access logs to get a fresh new one INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:883 Search the default suffix, with invalid '"" ""' attribute request INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:884 A Protocol error exception should be raised, see https://github.com/389ds/389-ds-base/issues/3028 INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:890 Check the access logs for correct messages
Passed suites/ds_logs/ds_logs_test.py::test_audit_log_rotate_and_check_string 7.14
-------------------------------Captured log call--------------------------------
INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:936 Doing modifications to rotate audit log INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:941 Doing one more modification just in case INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:948 Check that DS string is present on first line INFO  tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:952 Check that DS string is present only once
Passed suites/ds_logs/regression_test.py::test_default_loglevel_stripped[24576] 0.01
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/ds_logs/regression_test.py::test_default_loglevel_stripped[16512] 0.20
No log output captured.
Passed suites/ds_logs/regression_test.py::test_default_loglevel_stripped[16385] 1.09
No log output captured.
Passed suites/ds_logs/regression_test.py::test_dse_config_loglevel_error 12.39
No log output captured.
Passed suites/ds_tools/logpipe_test.py::test_user_permissions 0.05
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. INFO  tests.suites.ds_tools.logpipe_test:logpipe_test.py:32 Add system test user - dirsrv_testuser
-------------------------------Captured log call--------------------------------
INFO  tests.suites.ds_tools.logpipe_test:logpipe_test.py:68 Try to create a logpipe in the log directory with "-u" option specifying the user
Passed suites/ds_tools/replcheck_test.py::test_state 0.37
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 531c9466-6c48-43b9-b760-ffe7dd970adb / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working 
(expect ba0ca115-48ea-4856-a693-7c977c7b7e0a / got description=531c9466-6c48-43b9-b760-ffe7dd970adb) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists INFO  lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63702 is NOT working (expect 49efb769-0a1d-4633-a766-f24bc0f415f3 / got description=ba0ca115-48ea-4856-a693-7c977c7b7e0a) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63702 is working INFO  tests.suites.ds_tools.replcheck_test:replcheck_test.py:101 Export LDAPTLS_CACERTDIR env variable for ds-replcheck INFO  lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63702 is NOT working (expect 624cd5d0-94a8-4cbc-8113-cb265521a1d7 / got 
description=49efb769-0a1d-4633-a766-f24bc0f415f3) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63702 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701 is NOT working (expect 45c9cc6f-d8ac-4b29-a54f-417107989af3 / got description=624cd5d0-94a8-4cbc-8113-cb265521a1d7) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701 is working
Passed suites/ds_tools/replcheck_test.py::test_check_ruv 18.76
------------------------------Captured stderr call------------------------------
ldiffile: /tmp/export_master1.ldif ldiffile: /tmp/export_master2.ldif
Passed suites/ds_tools/replcheck_test.py::test_missing_entries 17.69
------------------------------Captured stderr call------------------------------
ldiffile: /tmp/export_master1.ldif ldiffile: /tmp/export_master2.ldif
Passed suites/ds_tools/replcheck_test.py::test_tombstones 21.20
------------------------------Captured stderr call------------------------------
ldiffile: /tmp/export_master1.ldif ldiffile: /tmp/export_master2.ldif
Passed suites/ds_tools/replcheck_test.py::test_conflict_entries 28.21
------------------------------Captured stderr call------------------------------
ldiffile: /tmp/export_master1.ldif ldiffile: /tmp/export_master2.ldif
Passed suites/ds_tools/replcheck_test.py::test_inconsistencies 22.74
------------------------------Captured stderr call------------------------------
ldiffile: /tmp/export_master1.ldif ldiffile: /tmp/export_master2.ldif
Passed suites/ds_tools/replcheck_test.py::test_suffix_exists 0.36
No log output captured.
Passed suites/ds_tools/replcheck_test.py::test_check_missing_tombstones 19.28
------------------------------Captured stderr call------------------------------
ldiffile: /tmp/export_master1.ldif ldiffile: /tmp/export_master2.ldif
Passed suites/ds_tools/replcheck_test.py::test_dsreplcheck_with_password_file 0.01
No log output captured.
Passed suites/ds_tools/replcheck_test.py::test_dsreplcheck_timeout_connection_mechanisms 1.33
-------------------------------Captured log call--------------------------------
INFO  tests.suites.ds_tools.replcheck_test:replcheck_test.py:535 Run ds-replcheck with -t option
Passed suites/dynamic_plugins/dynamic_plugins_test.py::test_acceptance 39.35
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 3f04b9b5-c6d4-4921-aa13-ef75a677e6c0 / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working 
(expect ef9e28ef-8c9e-4a42-9cb4-9fcb653bb475 / got description=3f04b9b5-c6d4-4921-aa13-ef75a677e6c0) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists
-------------------------------Captured log call--------------------------------
INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 909573fb-13e2-4507-bd5b-60a6d879c377 / got description=ef9e28ef-8c9e-4a42-9cb4-9fcb653bb475) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 46a0d66f-dbdf-4615-b601-8141c3e51f78 / got description=909573fb-13e2-4507-bd5b-60a6d879c377) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working
Passed suites/dynamic_plugins/dynamic_plugins_test.py::test_memory_corruption 42.70
-------------------------------Captured log call--------------------------------
INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 7cb25f70-89f1-4757-b8f4-7a035c19bc37 / got description=46a0d66f-dbdf-4615-b601-8141c3e51f78) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect ba03edd8-ceaf-4739-b810-a8967cf49218 / got description=7cb25f70-89f1-4757-b8f4-7a035c19bc37) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working
Passed suites/dynamic_plugins/dynamic_plugins_test.py::test_stress 548.09
-------------------------------Captured log call--------------------------------
INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 8210d710-0096-43df-9e70-6cd80447929f / got description=ba03edd8-ceaf-4739-b810-a8967cf49218) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 8210d710-0096-43df-9e70-6cd80447929f / got description=ba03edd8-ceaf-4739-b810-a8967cf49218) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 8210d710-0096-43df-9e70-6cd80447929f / got description=ba03edd8-ceaf-4739-b810-a8967cf49218) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect bfe74c39-97fb-4ae6-9d49-f02abf28c4db / got description=8210d710-0096-43df-9e70-6cd80447929f) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working
Passed suites/dynamic_plugins/notice_for_restart_test.py::test_notice_when_dynamic_not_enabled 4.64
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/export/export_test.py::test_dbtasks_db2ldif_with_non_accessible_ldif_file_path 4.76
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
------------------------------Captured stderr call------------------------------
ldiffile: /tmp/nonexistent/export.ldif
-------------------------------Captured log call--------------------------------
INFO  lib389.utils:export_test.py:63 Stopping the instance... INFO  lib389.utils:export_test.py:66 Performing an offline export to a non accessible ldif file path - should fail properly CRITICAL LogCapture:dbtasks.py:40 db2ldif failed INFO  lib389.utils:export_test.py:33 checking output msg INFO  lib389.utils:export_test.py:38 Clear the log INFO  lib389.utils:export_test.py:70 parsing the errors log to search for the error reported INFO  lib389.utils:export_test.py:79 Restarting the instance...
Passed suites/export/export_test.py::test_db2ldif_cli_with_non_accessible_ldif_file_path 5.01
------------------------------Captured stdout call------------------------------
db2ldif failed
------------------------------Captured stderr call------------------------------
ldiffile: /tmp/nonexistent/export.ldif
-------------------------------Captured log call--------------------------------
INFO  lib389.utils:export_test.py:104 Stopping the instance... INFO  lib389.utils:export_test.py:107 Performing an offline export to a non accessible ldif file path - should fail properly INFO  lib389.utils:export_test.py:115 db2ldif failed properly: error (1) INFO  lib389.utils:export_test.py:118 parsing the errors log to search for the error reported INFO  lib389.utils:export_test.py:121 error string : '[28/Oct/2020:21:10:55.205219446 -0400] - ERR - bdb_db2ldif - db2ldif: userRoot: can\'t open /tmp/nonexistent/export.ldif: 2 (No such file or directory) while running as user "dirsrv"\n', '[28/Oct/2020:21:11:00.271423599 -0400] - ERR - bdb_db2ldif - db2ldif: userRoot: can\'t open /tmp/nonexistent/export.ldif: 2 (No such file or directory) while running as user "dirsrv"\n' INFO  lib389.utils:export_test.py:123 Restarting the instance...
Passed suites/filter/basic_filter_test.py::test_search_attr 0.34
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/filter/bitw_filter_test.py::test_bitwise_plugin_status 0.00
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/filter/bitw_filter_test.py::test_search_disabled_accounts 0.01
No log output captured.
Passed suites/filter/bitw_filter_test.py::test_plugin_can_be_disabled 3.50
No log output captured.
Passed suites/filter/bitw_filter_test.py::test_plugin_is_disabled 0.00
No log output captured.
Passed suites/filter/bitw_filter_test.py::test_enabling_works_fine 4.39
No log output captured.
Passed suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (testUserAccountControl:1.2.840.113556.1.4.803:=513))-1] 0.00
No log output captured.
Passed suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (testUserAccountControl:1.2.840.113556.1.4.803:=16777216))-1] 0.17
No log output captured.
Passed suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (testUserAccountControl:1.2.840.113556.1.4.803:=8388608))-1] 0.01
No log output captured.
Passed suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (testUserAccountControl:1.2.840.113556.1.4.804:=5))-3] 0.00
No log output captured.
Passed suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (testUserAccountControl:1.2.840.113556.1.4.804:=8))-3] 0.17
No log output captured.
Passed suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (testUserAccountControl:1.2.840.113556.1.4.804:=7))-5] 0.01
No log output captured.
Passed suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testerperson) (testUserAccountControl:1.2.840.113556.1.4.804:=7))-0] 0.00
No log output captured.
Passed suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (&(testUserAccountControl:1.2.840.113556.1.4.803:=98536)(testUserAccountControl:1.2.840.113556.1.4.803:=912)))-0] 0.00
No log output captured.
Passed suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (&(testUserAccountControl:1.2.840.113556.1.4.804:=87)(testUserAccountControl:1.2.840.113556.1.4.804:=91)))-8] 0.00
No log output captured.
Passed suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (&(testUserAccountControl:1.2.840.113556.1.4.803:=89536)(testUserAccountControl:1.2.840.113556.1.4.804:=79)))-1] 0.14
No log output captured.
Passed suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (|(testUserAccountControl:1.2.840.113556.1.4.803:=89536)(testUserAccountControl:1.2.840.113556.1.4.804:=79)))-8] 0.01
No log output captured.
Passed suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (|(testUserAccountControl:1.2.840.113556.1.4.803:=89)(testUserAccountControl:1.2.840.113556.1.4.803:=536)))-0] 0.00
No log output captured.
Passed suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (testUserAccountControl:1.2.840.113556.1.4.803:=x))-13] 0.16
No log output captured.
Passed suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (testUserAccountControl:1.2.840.113556.1.4.803:=&\*#$%))-13] 0.01
No log output captured.
Passed suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (testUserAccountControl:1.2.840.113556.1.4.803:=-65536))-0] 0.00
No log output captured.
Passed suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (testUserAccountControl:1.2.840.113556.1.4.803:=-1))-0] 0.00
No log output captured.
Passed suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (testUserAccountControl:1.2.840.113556.1.4.803:=-))-13] 0.01
No log output captured.
Passed suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (testUserAccountControl:1.2.840.113556.1.4.803:=))-13] 0.15
No log output captured.
Passed suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (testUserAccountControl:1.2.840.113556.1.4.803:=\*))-13] 0.01
No log output captured.
Passed suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (testUserAccountControl:1.2.840.113556.1.4.804:=\*))-0] 0.00
No log output captured.
Passed suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (testUserAccountControl:1.2.840.113556.1.4.803:=6552))-0] 0.00
No log output captured.
Passed suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson\))(testUserAccountControl:1.2.840.113556.1.4.804:=6552))-0] 0.00
No log output captured.
Passed suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (testUserAccountControl:1.2.840.113556.1.4.803:=65536))-5] 0.01
No log output captured.
Passed suites/filter/bitw_filter_test.py::test_5_entries 0.21
No log output captured.
Passed suites/filter/bitw_filter_test.py::test_5_entries1 0.19
No log output captured.
Passed suites/filter/bitw_filter_test.py::test_5_entries3 0.02
No log output captured.
Passed suites/filter/bitw_filter_test.py::test_5_entries4 0.02
No log output captured.
Passed suites/filter/complex_filters_test.py::test_filters[(&(uid=uid1)(sn=last1)(givenname=first1))-1] 0.00
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(uid=uid1)(sn=last1)(givenname=first1))"...
Passed suites/filter/complex_filters_test.py::test_filters[(&(uid=uid1)(&(sn=last1)(givenname=first1)))-1] 0.00
-------------------------------Captured log call--------------------------------
INFO  tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(uid=uid1)(&(sn=last1)(givenname=first1)))"...
Passed suites/filter/complex_filters_test.py::test_filters[(&(uid=uid1)(&(&(sn=last1))(&(givenname=first1))))-1] 0.00
-------------------------------Captured log call--------------------------------
INFO  tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(uid=uid1)(&(&(sn=last1))(&(givenname=first1))))"...
Passed suites/filter/complex_filters_test.py::test_filters[(&(uid=*)(sn=last3)(givenname=*))-1] 0.00
-------------------------------Captured log call--------------------------------
INFO  tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(uid=*)(sn=last3)(givenname=*))"...
Passed suites/filter/complex_filters_test.py::test_filters[(&(uid=*)(&(sn=last3)(givenname=*)))-1] 0.00
-------------------------------Captured log call--------------------------------
INFO  tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(uid=*)(&(sn=last3)(givenname=*)))"...
Passed suites/filter/complex_filters_test.py::test_filters[(&(uid=uid5)(&(&(sn=*))(&(givenname=*))))-1] 0.00
-------------------------------Captured log call--------------------------------
INFO  tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(uid=uid5)(&(&(sn=*))(&(givenname=*))))"...
Passed suites/filter/complex_filters_test.py::test_filters[(&(objectclass=*)(uid=*)(sn=last*))-5] 0.00
-------------------------------Captured log call--------------------------------
INFO  tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(objectclass=*)(uid=*)(sn=last*))"...
Passed suites/filter/complex_filters_test.py::test_filters[(&(objectclass=*)(uid=*)(sn=last1))-1] 0.00
-------------------------------Captured log call--------------------------------
INFO  tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(objectclass=*)(uid=*)(sn=last1))"...
Passed suites/filter/complex_filters_test.py::test_filters[(|(uid=uid1)(sn=last1)(givenname=first1))-1] 0.00
-------------------------------Captured log call--------------------------------
INFO  tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(|(uid=uid1)(sn=last1)(givenname=first1))"...
Passed suites/filter/complex_filters_test.py::test_filters[(|(uid=uid1)(|(sn=last1)(givenname=first1)))-1] 0.00
-------------------------------Captured log call--------------------------------
INFO  tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(|(uid=uid1)(|(sn=last1)(givenname=first1)))"...
Passed suites/filter/complex_filters_test.py::test_filters[(|(uid=uid1)(|(|(sn=last1))(|(givenname=first1))))-1] 0.00
-------------------------------Captured log call--------------------------------
INFO  tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(|(uid=uid1)(|(|(sn=last1))(|(givenname=first1))))"...
Passed suites/filter/complex_filters_test.py::test_filters[(|(objectclass=*)(sn=last1)(|(givenname=first1)))-18] 0.00
-------------------------------Captured log call--------------------------------
INFO  tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(|(objectclass=*)(sn=last1)(|(givenname=first1)))"...
Passed suites/filter/complex_filters_test.py::test_filters[(|(&(objectclass=*)(sn=last1))(|(givenname=first1)))-1] 0.00
-------------------------------Captured log call--------------------------------
INFO  tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(|(&(objectclass=*)(sn=last1))(|(givenname=first1)))"...
Passed suites/filter/complex_filters_test.py::test_filters[(|(&(objectclass=*)(sn=last))(|(givenname=first1)))-1] 0.00
-------------------------------Captured log call--------------------------------
INFO  tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(|(&(objectclass=*)(sn=last))(|(givenname=first1)))"...
Passed suites/filter/complex_filters_test.py::test_filters[(&(uid=uid1)(!(cn=NULL)))-1] 0.00
-------------------------------Captured log call--------------------------------
INFO  tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(uid=uid1)(!(cn=NULL)))"...
Passed suites/filter/complex_filters_test.py::test_filters[(&(!(cn=NULL))(uid=uid1))-1] 0.00
-------------------------------Captured log call--------------------------------
INFO  tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(!(cn=NULL))(uid=uid1))"...
Passed suites/filter/complex_filters_test.py::test_filters[(&(uid=*)(&(!(uid=1))(!(givenname=first1))))-5] 0.01
-------------------------------Captured log call--------------------------------
INFO  tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(uid=*)(&(!(uid=1))(!(givenname=first1))))"...
Passed suites/filter/complex_filters_test.py::test_filters[(&(|(uid=uid1)(uid=NULL))(sn=last1))-1] 0.00
-------------------------------Captured log call--------------------------------
INFO  tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(|(uid=uid1)(uid=NULL))(sn=last1))"...
Passed suites/filter/complex_filters_test.py::test_filters[(&(|(uid=uid1)(uid=NULL))(!(sn=NULL)))-1] 0.00
-------------------------------Captured log call--------------------------------
INFO  tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(|(uid=uid1)(uid=NULL))(!(sn=NULL)))"...
Passed suites/filter/complex_filters_test.py::test_filters[(&(|(uid=uid1)(sn=last2))(givenname=first1))-1] 0.00
-------------------------------Captured log call--------------------------------
INFO  tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(|(uid=uid1)(sn=last2))(givenname=first1))"...
Passed suites/filter/complex_filters_test.py::test_filters[(|(&(uid=uid1)(!(uid=NULL)))(sn=last2))-2] 0.00
-------------------------------Captured log call--------------------------------
INFO  tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(|(&(uid=uid1)(!(uid=NULL)))(sn=last2))"...
Passed suites/filter/complex_filters_test.py::test_filters[(|(&(uid=uid1)(uid=NULL))(sn=last2))-1] 0.00
-------------------------------Captured log call--------------------------------
INFO  tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(|(&(uid=uid1)(uid=NULL))(sn=last2))"...
Passed suites/filter/complex_filters_test.py::test_filters[(&(uid=uid5)(sn=*)(cn=*)(givenname=*)(uid=u*)(sn=la*)(cn=full*)(givenname=f*)(uid>=u)(!(givenname=NULL)))-1] 0.00
-------------------------------Captured log call--------------------------------
INFO  tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(uid=uid5)(sn=*)(cn=*)(givenname=*)(uid=u*)(sn=la*)(cn=full*)(givenname=f*)(uid>=u)(!(givenname=NULL)))"...
Passed suites/filter/complex_filters_test.py::test_filters[(|(&(objectclass=*)(sn=last))(&(givenname=first1)))-1] 0.00
-------------------------------Captured log call--------------------------------
INFO  tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(|(&(objectclass=*)(sn=last))(&(givenname=first1)))"...
Passed suites/filter/complex_filters_test.py::test_filters[(&(uid=uid1)(sn=last1)(givenname=NULL))-0] 0.00
-------------------------------Captured log call--------------------------------
INFO  tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(uid=uid1)(sn=last1)(givenname=NULL))"...
Passed suites/filter/complex_filters_test.py::test_filters[(&(uid=uid1)(&(sn=last1)(givenname=NULL)))-0] 0.00
-------------------------------Captured log call--------------------------------
INFO  tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(uid=uid1)(&(sn=last1)(givenname=NULL)))"...
Passed suites/filter/complex_filters_test.py::test_filters[(&(uid=uid1)(&(&(sn=last1))(&(givenname=NULL))))-0] 0.00
-------------------------------Captured log call--------------------------------
INFO  tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(uid=uid1)(&(&(sn=last1))(&(givenname=NULL))))"...
Passed suites/filter/complex_filters_test.py::test_filters[(&(uid=uid1)(&(&(sn=last1))(&(givenname=NULL)(sn=*)))(|(sn=NULL)))-0] 0.00
-------------------------------Captured log call--------------------------------
INFO  tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(uid=uid1)(&(&(sn=last1))(&(givenname=NULL)(sn=*)))(|(sn=NULL)))"...
Passed suites/filter/complex_filters_test.py::test_filters[(&(uid=uid1)(&(&(sn=last*))(&(givenname=first*)))(&(sn=NULL)))-0] 0.01
-------------------------------Captured log call--------------------------------
INFO  tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(uid=uid1)(&(&(sn=last*))(&(givenname=first*)))(&(sn=NULL)))"...
Passed suites/filter/complex_filters_test.py::test_filters[(|(uid=NULL)(sn=NULL)(givenname=NULL))-0] 0.00
-------------------------------Captured log call--------------------------------
INFO  tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(|(uid=NULL)(sn=NULL)(givenname=NULL))"...
Passed suites/filter/complex_filters_test.py::test_filters[(|(uid=NULL)(|(sn=NULL)(givenname=NULL)))-0] 0.00
-------------------------------Captured log call--------------------------------
INFO  tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(|(uid=NULL)(|(sn=NULL)(givenname=NULL)))"...
Passed suites/filter/complex_filters_test.py::test_filters[(|(uid=NULL)(|(|(sn=NULL))(|(givenname=NULL))))-0] 0.00
-------------------------------Captured log call--------------------------------
INFO  tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(|(uid=NULL)(|(|(sn=NULL))(|(givenname=NULL))))"...
Passed suites/filter/complex_filters_test.py::test_filters[(|(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*))-6] 0.00
-------------------------------Captured log call--------------------------------
INFO  tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(|(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*))"...
Passed suites/filter/complex_filters_test.py::test_filters[(uid>=uid3)-3] 0.00
-------------------------------Captured log call--------------------------------
INFO  tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(uid>=uid3)"...
Passed suites/filter/complex_filters_test.py::test_filters[(&(uid=*)(uid>=uid3))-3] 0.00
-------------------------------Captured log call--------------------------------
INFO  tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(uid=*)(uid>=uid3))"...
Passed suites/filter/complex_filters_test.py::test_filters[(|(uid>=uid3)(uid<=uid5))-6] 0.01
-------------------------------Captured log call--------------------------------
INFO  tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(|(uid>=uid3)(uid<=uid5))"...
Passed suites/filter/complex_filters_test.py::test_filters[(&(uid>=uid3)(uid<=uid5))-3] 0.00
-------------------------------Captured log call--------------------------------
INFO  tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(uid>=uid3)(uid<=uid5))"...
Passed suites/filter/complex_filters_test.py::test_filters[(|(&(uid>=uid3)(uid<=uid5))(uid=*))-6] 0.00
-------------------------------Captured log call--------------------------------
INFO  tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(|(&(uid>=uid3)(uid<=uid5))(uid=*))"...
Passed suites/filter/filter_cert_test.py::test_positive 24.12
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index0] 0.02
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index1] 0.01
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index2] 0.01
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index3] 0.02
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index4] 0.02
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index5] 0.01
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index6] 0.01
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index7] 0.01
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index8] 0.01
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index9] 0.01
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index10] 0.01
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index11] 0.01
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index12] 0.02
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index13] 0.01
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index14] 0.01
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index15] 0.26
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index16] 0.02
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index17] 0.01
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods[mod0] 0.04
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods[mod1] 0.03
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods[mod2] 0.03
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods[mod3] 0.03
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods[mod4] 0.03
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods[mod5] 0.03
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods[mod6] 0.03
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods[mod7] 0.03
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods[mod8] 0.03
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods[mod9] 0.03
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods[mod10] 0.03
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods[mod11] 0.03
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods[mod12] 0.03
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods[mod13] 0.03
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods[mod14] 0.03
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods[mod15] 0.03
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods[mod16] 0.03
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods[mod17] 0.03
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods_replace[mode0] 0.03
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods_replace[mode1] 0.03
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods_replace[mode2] 0.04
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods_replace[mode3] 0.03
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods_replace[mode4] 0.03
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods_replace[mode5] 0.03
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods_replace[mode6] 0.03
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods_replace[mode7] 0.03
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods_replace[mode8] 0.03
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods_replace[mode9] 0.03
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods_replace[mode10] 0.03
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods_replace[mode11] 0.03
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods_replace[mode12] 0.04
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods_replace[mode13] 0.03
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods_replace[mode14] 0.03
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods_replace[mode15] 0.03
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods_replace[mode16] 0.03
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods_replace[mode17] 0.03
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods_delete[mode0] 0.03
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods_delete[mode1] 0.03
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods_delete[mode2] 0.03
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods_delete[mode3] 0.03
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods_delete[mode4] 0.03
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods_delete[mode5] 0.03
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods_delete[mode6] 0.03
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods_delete[mode7] 0.03
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods_delete[mode8] 0.03
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods_delete[mode9] 0.04
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods_delete[mode10] 0.04
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods_delete[mode11] 0.03
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods_delete[mode12] 0.03
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods_delete[mode13] 0.05
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods_delete[mode14] 0.03
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods_delete[mode15] 0.29
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods_delete[mode16] 0.03
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_mods_delete[mode17] 0.03
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_search_positive_negative 0.36
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrbitStringMatch:bitStringMatch:='0001'B)-1] 0.00
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseExactIA5Match:caseExactIA5Match:=Sprain)-1] 0.00
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseExactMatch:caseExactMatch:=ÇélIné Ändrè)-1] 0.00
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseExactMatch:caseExactOrderingMatch:=ÇélIné Ändrè)-5] 0.00
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrgeneralizedTimeMatch:generalizedTimeMatch:=20100218171300Z)-1] 0.00
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrgeneralizedTimeMatch:generalizedTimeOrderingMatch:=20100218171300Z)-6] 0.01
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrbooleanMatch:booleanMatch:=TRUE)-1] 0.00
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseIgnoreIA5Match:caseIgnoreIA5Match:=sprain1)-1] 0.00
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseIgnoreMatch:caseIgnoreMatch:=ÇélIné Ändrè1)-1] 0.00
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseIgnoreMatch:caseIgnoreOrderingMatch:=ÇélIné Ändrè1)-6] 0.01
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseIgnoreListMatch:caseIgnoreListMatch:=foo1$bar)-1] 0.01
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrobjectIdentifierMatch:objectIdentifierMatch:=1.3.6.1.4.1.1466.115.121.1.15)-1] 0.01
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrdirectoryStringFirstComponentMatch:directoryStringFirstComponentMatch:=ÇélIné Ändrè1)-1] 0.00
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrobjectIdentifierFirstComponentMatch:objectIdentifierFirstComponentMatch:=1.3.6.1.4.1.1466.115.121.1.15)-1] 0.01
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrdistinguishedNameMatch:distinguishedNameMatch:=cn=foo1,cn=bar)-1] 0.01
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrintegerMatch:integerMatch:=-2)-1] 0.00
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrintegerMatch:integerOrderingMatch:=-2)-6] 0.00
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrintegerFirstComponentMatch:integerFirstComponentMatch:=-2)-1] 0.00
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_do_extensible_search[(attruniqueMemberMatch:uniqueMemberMatch:=cn=foo1,cn=bar#'0001'B)-1] 0.00
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrnumericStringMatch:numericStringMatch:=00001)-10] 0.00
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrnumericStringMatch:numericStringMatch:=00001)-11] 0.00
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrtelephoneNumberMatch:telephoneNumberMatch:=+1 408 555 4798)-1] 0.01
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_do_extensible_search[(attroctetStringMatch:octetStringMatch:=AAAAAAAAAAAAAAE=)-1] 0.00
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_do_extensible_search[(attroctetStringMatch:octetStringOrderingMatch:=AAAAAAAAAAAAAAE=)-6] 0.00
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseExactMatch=*ÇélIné Ändrè*)-1] 0.00
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseExactMatch=ÇélIné Ändrè*)-1] 0.00
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseExactMatch=*ÇélIné Ändrè)-1] 0.00
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseExactMatch=*é Ä*)-5] 0.01
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseExactIA5Match=*Sprain*)-1] 0.00
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseExactIA5Match=Sprain*)-1] 0.00
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseExactIA5Match=*Sprain)-1] 0.00
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseExactIA5Match=*rai*)-3] 0.01
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseIgnoreIA5Match=*sprain1*)-1] 0.00
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseIgnoreIA5Match=sprain1*)-1] 0.00
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseIgnoreIA5Match=*sprain1)-1] 0.00
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseIgnoreIA5Match=*rai*)-6] 0.01
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseIgnoreMatch=*ÇélIné Ändrè1*)-1] 0.01
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseIgnoreMatch=ÇélIné Ändrè1*)-1] 0.00
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseIgnoreMatch=*ÇélIné Ändrè1)-1] 0.01
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseIgnoreMatch=*é Ä*)-6] 0.01
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseIgnoreListMatch=*foo1$bar*)-1] 0.00
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseIgnoreListMatch=foo1$bar*)-1] 0.00
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseIgnoreListMatch=*foo1$bar)-1] 0.00
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseIgnoreListMatch=*1$b*)-1] 0.01
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrnumericStringMatch=*00001*)-1] 0.00
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrnumericStringMatch=00001*)-1] 0.00
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrnumericStringMatch=*00001)-1] 0.00
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrnumericStringMatch=*000*)-6] 0.01
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrtelephoneNumberMatch=*+1 408 555 4798*)-1] 0.00
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrtelephoneNumberMatch=+1 408 555 4798*)-1] 0.00
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrtelephoneNumberMatch=*+1 408 555 4798)-1] 0.00
No log output captured.
Passed suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrtelephoneNumberMatch=* 55*)-6] 0.00
No log output captured.
Passed suites/filter/filter_indexing_test.py::test_positive[(|(|(ou=nothing1)(ou=people))(|(ou=nothing2)(ou=nothing3)))] 0.00
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/filter/filter_indexing_test.py::test_positive[(|(|(ou=people)(ou=nothing1))(|(ou=nothing2)(ou=nothing3)))] 0.00
No log output captured.
Passed suites/filter/filter_indexing_test.py::test_positive[(|(|(ou=nothing1)(ou=nothing2))(|(ou=people)(ou=nothing3)))] 0.00
No log output captured.
Passed suites/filter/filter_indexing_test.py::test_positive[(|(|(ou=nothing1)(ou=nothing2))(|(ou=nothing3)(ou=people)))] 0.00
No log output captured.
Passed suites/filter/filter_indexing_test.py::test_positive[(&(sn<=0000000000000000)(givenname>=FFFFFFFFFFFFFFFF))] 0.16
No log output captured.
Passed suites/filter/filter_indexing_test.py::test_positive[(&(sn>=0000000000000000)(sn<=1111111111111111))] 0.00
No log output captured.
Passed suites/filter/filter_indexing_test.py::test_positive[(&(sn>=0000000000000000)(givenname<=FFFFFFFFFFFFFFFF))] 0.00
No log output captured.
Passed suites/filter/filter_indexing_test.py::test_indexing_schema 3.49
No log output captured.
Passed suites/filter/filter_indexing_test.py::test_indexing[(uidNumber=18446744073709551617)] 0.15
No log output captured.
Passed suites/filter/filter_indexing_test.py::test_indexing[(gidNumber=18446744073709551617)] 0.15
No log output captured.
Passed suites/filter/filter_indexing_test.py::test_indexing[(MYINTATTR=18446744073709551617)] 0.00
No log output captured.
Passed suites/filter/filter_indexing_test.py::test_indexing[(&(uidNumber=*)(!(uidNumber=18446744073709551617)))] 0.16
No log output captured.
Passed suites/filter/filter_indexing_test.py::test_indexing[(&(gidNumber=*)(!(gidNumber=18446744073709551617)))] 0.01
No log output captured.
Passed suites/filter/filter_indexing_test.py::test_indexing[(&(uidNumber=*)(!(gidNumber=18446744073709551617)))] 0.15
No log output captured.
Passed suites/filter/filter_indexing_test.py::test_indexing[(&(myintattr=*)(!(myintattr=18446744073709551617)))] 0.00
No log output captured.
Passed suites/filter/filter_indexing_test.py::test_indexing[(uidNumber>=-18446744073709551617)] 0.01
No log output captured.
Passed suites/filter/filter_indexing_test.py::test_indexing[(gidNumber>=-18446744073709551617)] 0.16
No log output captured.
Passed suites/filter/filter_indexing_test.py::test_indexing[(uidNumber<=18446744073709551617)] 0.01
No log output captured.
Passed suites/filter/filter_indexing_test.py::test_indexing[(gidNumber<=18446744073709551617)] 0.01
No log output captured.
Passed suites/filter/filter_indexing_test.py::test_indexing[(myintattr<=18446744073709551617)] 0.00
No log output captured.
Passed suites/filter/filter_indexing_test.py::test_indexing_negative[(gidNumber=54321)] 0.17
No log output captured.
Passed suites/filter/filter_indexing_test.py::test_indexing_negative[(uidNumber=54321)] 0.00
No log output captured.
Passed suites/filter/filter_indexing_test.py::test_indexing_negative[(myintattr=54321)] 0.00
No log output captured.
Passed suites/filter/filter_indexing_test.py::test_indexing_negative[(gidNumber<=-999999999999999999999999999999)] 0.00
No log output captured.
Passed suites/filter/filter_indexing_test.py::test_indexing_negative[(uidNumber<=-999999999999999999999999999999)] 0.00
No log output captured.
Passed suites/filter/filter_indexing_test.py::test_indexing_negative[(myintattr<=-999999999999999999999999999999)] 0.00
No log output captured.
Passed suites/filter/filter_indexing_test.py::test_indexing_negative[(gidNumber>=999999999999999999999999999999)] 0.00
No log output captured.
Passed suites/filter/filter_indexing_test.py::test_indexing_negative[(uidNumber>=999999999999999999999999999999)] 0.00
No log output captured.
Passed suites/filter/filter_indexing_test.py::test_indexing_negative[(myintattr>=999999999999999999999999999999)] 0.00
No log output captured.
Passed suites/filter/filter_logic_test.py::test_eq 0.01
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/filter/filter_logic_test.py::test_sub 0.00
No log output captured.
Passed suites/filter/filter_logic_test.py::test_not_eq 0.00
No log output captured.
Passed suites/filter/filter_logic_test.py::test_ranges 0.01
No log output captured.
Passed suites/filter/filter_logic_test.py::test_and_eq 0.01
No log output captured.
Passed suites/filter/filter_logic_test.py::test_range 0.00
No log output captured.
Passed suites/filter/filter_logic_test.py::test_and_allid_shortcut 0.01
No log output captured.
Passed suites/filter/filter_logic_test.py::test_or_eq 0.01
No log output captured.
Passed suites/filter/filter_logic_test.py::test_and_not_eq 0.01
No log output captured.
Passed suites/filter/filter_logic_test.py::test_or_not_eq 0.00
No log output captured.
Passed suites/filter/filter_logic_test.py::test_and_range 0.02
No log output captured.
Passed suites/filter/filter_logic_test.py::test_or_range 0.01
No log output captured.
Passed suites/filter/filter_logic_test.py::test_and_and_eq 0.01
No log output captured.
Passed suites/filter/filter_logic_test.py::test_or_or_eq 0.01
No log output captured.
Passed suites/filter/filter_logic_test.py::test_and_or_eq 0.01
No log output captured.
Passed suites/filter/filter_logic_test.py::test_or_and_eq 0.26
No log output captured.
Passed suites/filter/filter_match_test.py::test_matching_rules 0.08
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/filter/filter_match_test.py::test_add_attribute_types 1.39
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule0] 0.02
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule1] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule2] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule3] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule4] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule5] 0.02
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule6] 0.02
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule7] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule8] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule9] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule10] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule11] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule12] 0.02
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule13] 0.02
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule14] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule15] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule16] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule17] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_modes[mode0] 0.04
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_modes[mode1] 0.03
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_modes[mode2] 0.03
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_modes[mode3] 0.03
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_modes[mode4] 0.03
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_modes[mode5] 0.03
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_modes[mode6] 0.03
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_modes[mode7] 0.03
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_modes[mode8] 0.03
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_modes[mode9] 0.03
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_modes[mode10] 0.03
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_modes[mode11] 0.03
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_modes[mode12] 0.03
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_modes[mode13] 0.03
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_modes[mode14] 0.03
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_modes[mode15] 0.03
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_modes[mode16] 0.03
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_modes[mode17] 0.03
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode0] 0.05
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode1] 0.05
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode2] 0.05
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode3] 0.05
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode4] 0.05
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode5] 0.06
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode6] 0.05
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode7] 0.05
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode8] 0.05
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode9] 0.05
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode10] 0.05
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode11] 0.05
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode12] 0.06
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode13] 0.05
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode14] 0.06
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode15] 0.05
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode16] 0.33
No log output captured.
Passed suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode17] 0.05
No log output captured.
Passed suites/filter/filter_match_test.py::test_match_count[(attrbitStringMatch='0001'B)-1-(attrbitStringMatch:bitStringMatch:='000100000'B)] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_match_count[(attrgeneralizedTimeMatch=20100218171300Z)-1-(attrcaseExactIA5Match=SPRAIN)] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_match_count[(attrcaseExactMatch>=ÇélIné Ändrè)-5-(attrcaseExactMatch=ÇéLINé ÄNDRè)] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_match_count[(attrcaseExactMatch:caseExactMatch:=ÇélIné Ändrè)-1-(attrcaseExactMatch>=çéliné ändrè)] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_match_count[(attrcaseExactIA5Match=Sprain)-1-(attrgeneralizedTimeMatch=20300218171300Z)] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_match_count[(attrbooleanMatch=TRUE)-1-(attrgeneralizedTimeMatch>=20300218171300Z)] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_match_count[(attrcaseIgnoreIA5Match=sprain1)-1-(attrcaseIgnoreIA5Match=sprain9999)] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_match_count[(attrcaseIgnoreMatch=ÇélIné Ändrè1)-1-(attrcaseIgnoreMatch=ÇélIné Ändrè9999)] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_match_count[(attrcaseIgnoreMatch>=ÇélIné Ändrè1)-6-(attrcaseIgnoreMatch>=ÇélIné Ändrè9999)] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_match_count[(attrcaseIgnoreListMatch=foo1$bar)-1-(attrcaseIgnoreListMatch=foo1$bar$baz$biff)] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_match_count[(attrobjectIdentifierMatch=1.3.6.1.4.1.1466.115.121.1.15)-1-(attrobjectIdentifierMatch=1.3.6.1.4.1.1466.115.121.1.15.99999)] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_match_count[(attrgeneralizedTimeMatch>=20100218171300Z)-6-(attroctetStringMatch>=AAAAAAAAAAABAQQ=)] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_match_count[(attrdirectoryStringFirstComponentMatch=ÇélIné Ändrè1)-1-(attrdirectoryStringFirstComponentMatch=ÇélIné Ändrè9999)] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_match_count[(attrobjectIdentifierFirstComponentMatch=1.3.6.1.4.1.1466.115.121.1.15)-1-(attrobjectIdentifierFirstComponentMatch=1.3.6.1.4.1.1466.115.121.1.15.99999)] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_match_count[(attrdistinguishedNameMatch=cn=foo1,cn=bar)-1-(attrdistinguishedNameMatch=cn=foo1,cn=bar,cn=baz)] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_match_count[(attrintegerMatch=-2)-1-(attrintegerMatch=-20)] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_match_count[(attrintegerMatch>=-2)-6-(attrintegerMatch>=20)] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_match_count[(attrintegerFirstComponentMatch=-2)-1-(attrintegerFirstComponentMatch=-20)] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_match_count[(attruniqueMemberMatch=cn=foo1,cn=bar#'0001'B)-1-(attruniqueMemberMatch=cn=foo1,cn=bar#'00010000'B)] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_match_count[(attrnumericStringMatch=00001)-1-(attrnumericStringMatch=000000001)] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_match_count[(attrnumericStringMatch>=00001)-6-(attrnumericStringMatch>=01)] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_match_count[(attrtelephoneNumberMatch=+1 408 555 4798)-1-(attrtelephoneNumberMatch=+2 408 555 4798)] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_match_count[(attroctetStringMatch=AAAAAAAAAAAAAAE=)-1-(attroctetStringMatch=AAAAAAAAAAAAAAEB)] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_match_count[(attroctetStringMatch>=AAAAAAAAAAAAAAE=)-6-(attroctetStringMatch>=AAAAAAAAAAABAQE=)] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_extensible_search[(attrbitStringMatch:bitStringMatch:='0001'B)-1] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_extensible_search[(attrcaseExactIA5Match:caseExactIA5Match:=Sprain)-1] 0.00
No log output captured.
Passed suites/filter/filter_match_test.py::test_extensible_search[(attrcaseExactMatch:caseExactMatch:=ÇélIné Ändrè)-1] 0.00
No log output captured.
Passed suites/filter/filter_match_test.py::test_extensible_search[(attrcaseExactMatch:caseExactOrderingMatch:=ÇélIné Ändrè)-5] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_extensible_search[(attrgeneralizedTimeMatch:generalizedTimeMatch:=20100218171300Z)-1] 0.00
No log output captured.
Passed suites/filter/filter_match_test.py::test_extensible_search[(attrgeneralizedTimeMatch:generalizedTimeOrderingMatch:=20100218171300Z)-6] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_extensible_search[(attrbooleanMatch:booleanMatch:=TRUE)-1] 0.00
No log output captured.
Passed suites/filter/filter_match_test.py::test_extensible_search[(attrcaseIgnoreIA5Match:caseIgnoreIA5Match:=sprain1)-1] 0.00
No log output captured.
Passed suites/filter/filter_match_test.py::test_extensible_search[(attrcaseIgnoreMatch:caseIgnoreMatch:=ÇélIné Ändrè1)-1] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_extensible_search[(attrcaseIgnoreMatch:caseIgnoreOrderingMatch:=ÇélIné Ändrè1)-6] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_extensible_search[(attrcaseIgnoreListMatch:caseIgnoreListMatch:=foo1$bar)-1] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_extensible_search[(attrobjectIdentifierMatch:objectIdentifierMatch:=1.3.6.1.4.1.1466.115.121.1.15)-1] 0.16
No log output captured.
Passed suites/filter/filter_match_test.py::test_extensible_search[(attrdirectoryStringFirstComponentMatch:directoryStringFirstComponentMatch:=ÇélIné Ändrè1)-1] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_extensible_search[(attrobjectIdentifierFirstComponentMatch:objectIdentifierFirstComponentMatch:=1.3.6.1.4.1.1466.115.121.1.15)-1] 0.00
No log output captured.
Passed suites/filter/filter_match_test.py::test_extensible_search[(attrdistinguishedNameMatch:distinguishedNameMatch:=cn=foo1,cn=bar)-1] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_extensible_search[(attrintegerMatch:integerMatch:=-2)-1] 0.00
No log output captured.
Passed suites/filter/filter_match_test.py::test_extensible_search[(attrintegerMatch:integerOrderingMatch:=-2)-6] 0.00
No log output captured.
Passed suites/filter/filter_match_test.py::test_extensible_search[(attrintegerFirstComponentMatch:integerFirstComponentMatch:=-2)-1] 0.00
No log output captured.
Passed suites/filter/filter_match_test.py::test_extensible_search[(attruniqueMemberMatch:uniqueMemberMatch:=cn=foo1,cn=bar#'0001'B)-1] 0.00
No log output captured.
Passed suites/filter/filter_match_test.py::test_extensible_search[(attrnumericStringMatch:numericStringMatch:=00001)-10] 0.00
No log output captured.
Passed suites/filter/filter_match_test.py::test_extensible_search[(attrnumericStringMatch:numericStringMatch:=00001)-11] 0.00
No log output captured.
Passed suites/filter/filter_match_test.py::test_extensible_search[(attrtelephoneNumberMatch:telephoneNumberMatch:=+1 408 555 4798)-1] 0.00
No log output captured.
Passed suites/filter/filter_match_test.py::test_extensible_search[(attroctetStringMatch:octetStringMatch:=AAAAAAAAAAAAAAE=)-1] 0.00
No log output captured.
Passed suites/filter/filter_match_test.py::test_extensible_search[(attroctetStringMatch:octetStringOrderingMatch:=AAAAAAAAAAAAAAE=)-6] 0.00
No log output captured.
Passed suites/filter/filter_match_test.py::test_extensible_search[(attrcaseExactMatch=*ÇélIné Ändrè*)-1] 0.00
No log output captured.
Passed suites/filter/filter_match_test.py::test_extensible_search[(attrcaseExactMatch=ÇélIné Ändrè*)-1] 0.00
No log output captured.
Passed suites/filter/filter_match_test.py::test_extensible_search[(attrcaseExactMatch=*ÇélIné Ändrè)-1] 0.00
No log output captured.
Passed suites/filter/filter_match_test.py::test_extensible_search[(attrcaseExactMatch=*é Ä*)-5] 0.00
No log output captured.
Passed suites/filter/filter_match_test.py::test_extensible_search[(attrcaseExactIA5Match=*Sprain*)-1] 0.00
No log output captured.
Passed suites/filter/filter_match_test.py::test_extensible_search[(attrcaseExactIA5Match=Sprain*)-1] 0.00
No log output captured.
Passed suites/filter/filter_match_test.py::test_extensible_search[(attrcaseExactIA5Match=*Sprain)-1] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_extensible_search[(attrcaseExactIA5Match=*rai*)-3] 0.00
No log output captured.
Passed suites/filter/filter_match_test.py::test_extensible_search[(attrcaseIgnoreIA5Match=*sprain1*)-1] 0.00
No log output captured.
Passed suites/filter/filter_match_test.py::test_extensible_search[(attrcaseIgnoreIA5Match=sprain1*)-1] 0.00
No log output captured.
Passed suites/filter/filter_match_test.py::test_extensible_search[(attrcaseIgnoreIA5Match=*sprain1)-1] 0.00
No log output captured.
Passed suites/filter/filter_match_test.py::test_extensible_search[(attrcaseIgnoreIA5Match=*rai*)-6] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_extensible_search[(attrcaseIgnoreMatch=*ÇélIné Ändrè1*)-1] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_extensible_search[(attrcaseIgnoreMatch=ÇélIné Ändrè1*)-1] 0.00
No log output captured.
Passed suites/filter/filter_match_test.py::test_extensible_search[(attrcaseIgnoreMatch=*ÇélIné Ändrè1)-1] 0.00
No log output captured.
Passed suites/filter/filter_match_test.py::test_extensible_search[(attrcaseIgnoreMatch=*é Ä*)-6] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_extensible_search[(attrcaseIgnoreListMatch=*foo1$bar*)-1] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_extensible_search[(attrcaseIgnoreListMatch=foo1$bar*)-1] 0.00
No log output captured.
Passed suites/filter/filter_match_test.py::test_extensible_search[(attrcaseIgnoreListMatch=*foo1$bar)-1] 0.00
No log output captured.
Passed suites/filter/filter_match_test.py::test_extensible_search[(attrcaseIgnoreListMatch=*1$b*)-1] 0.00
No log output captured.
Passed suites/filter/filter_match_test.py::test_extensible_search[(attrnumericStringMatch=*00001*)-1] 0.00
No log output captured.
Passed suites/filter/filter_match_test.py::test_extensible_search[(attrnumericStringMatch=00001*)-1] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_extensible_search[(attrnumericStringMatch=*00001)-1] 0.00
No log output captured.
Passed suites/filter/filter_match_test.py::test_extensible_search[(attrnumericStringMatch=*000*)-6] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_extensible_search[(attrtelephoneNumberMatch=*+1 408 555 4798*)-1] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_extensible_search[(attrtelephoneNumberMatch=+1 408 555 4798*)-1] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_extensible_search[(attrtelephoneNumberMatch=*+1 408 555 4798)-1] 0.01
No log output captured.
Passed suites/filter/filter_match_test.py::test_extensible_search[(attrtelephoneNumberMatch=* 55*)-6] 0.01
No log output captured.
Passed suites/filter/filter_test.py::test_filter_escaped 0.29
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.suites.filter.filter_test:filter_test.py:42 Running test_filter_escaped... INFO  tests.suites.filter.filter_test:filter_test.py:78 test_filter_escaped: PASSED
Passed suites/filter/filter_test.py::test_filter_search_original_attrs 0.15
-------------------------------Captured log call--------------------------------
INFO  tests.suites.filter.filter_test:filter_test.py:95 Running test_filter_search_original_attrs... INFO  tests.suites.filter.filter_test:filter_test.py:108 test_filter_search_original_attrs: PASSED
Passed suites/filter/filter_test.py::test_filter_scope_one 0.15
-------------------------------Captured log call--------------------------------
INFO  tests.suites.filter.filter_test:filter_test.py:125 Search user using ldapsearch with scope one INFO  tests.suites.filter.filter_test:filter_test.py:127 [dn: ou=services,dc=example,dc=com ou: services ] INFO  tests.suites.filter.filter_test:filter_test.py:129 Search should only have one entry
Passed suites/filter/filter_test.py::test_filter_with_attribute_subtype 0.31
-------------------------------Captured log call--------------------------------
INFO  lib389:filter_test.py:154 Bind as cn=Directory Manager INFO  lib389:filter_test.py:161 ######################### ADD ###################### INFO  lib389:filter_test.py:184 Try to add Add cn=test_entry both, dc=example,dc=com: dn: cn=test_entry both, dc=example,dc=com cn: test_entry both cn;en: test_entry en cn;fr: test_entry fr objectclass: top objectclass: person sn: test_entry both INFO  lib389:filter_test.py:187 Try to add Add cn=test_entry en only, dc=example,dc=com: dn: cn=test_entry en only, dc=example,dc=com cn: test_entry en only cn;en: test_entry en objectclass: top objectclass: person sn: test_entry en only INFO  lib389:filter_test.py:190 ######################### SEARCH ###################### INFO  lib389:filter_test.py:194 Try to search with filter (&(sn=test_entry en only)(!(cn=test_entry fr))) INFO  lib389:filter_test.py:198 Found cn=test_entry en only,dc=example,dc=com INFO  lib389:filter_test.py:202 Try to search with filter (&(sn=test_entry en only)(!(cn;fr=test_entry fr))) INFO  lib389:filter_test.py:206 Found cn=test_entry en only,dc=example,dc=com INFO  lib389:filter_test.py:210 Try to search with filter (&(sn=test_entry en only)(!(cn;en=test_entry en))) INFO  lib389:filter_test.py:213 Found none INFO  lib389:filter_test.py:215 ######################### DELETE ###################### INFO  lib389:filter_test.py:217 Try to delete cn=test_entry both, dc=example,dc=com INFO  lib389:filter_test.py:220 Try to delete cn=test_entry en only, dc=example,dc=com INFO  tests.suites.filter.filter_test:filter_test.py:223 Testcase PASSED
Passed suites/filter/filter_test.py::test_extended_search 0.03
-------------------------------Captured log call--------------------------------
INFO  tests.suites.filter.filter_test:filter_test.py:250 Running test_filter_escaped... INFO  lib389:filter_test.py:267 Try to search with filter (cn:de:=ext-test-entry) INFO  lib389:filter_test.py:273 Try to search with filter (cn:caseIgnoreIA5Match:=EXT-TEST-ENTRY) INFO  lib389:filter_test.py:279 Try to search with filter (cn:caseIgnoreMatch:=EXT-TEST-ENTRY) INFO  lib389:filter_test.py:285 Try to search with filter (cn:caseExactMatch:=EXT-TEST-ENTRY) INFO  lib389:filter_test.py:291 Try to search with filter (cn:caseExactMatch:=ext-test-entry) INFO  lib389:filter_test.py:297 Try to search with filter (cn:caseExactIA5Match:=EXT-TEST-ENTRY) INFO  lib389:filter_test.py:303 Try to search with filter (cn:caseExactIA5Match:=ext-test-entry)
Passed suites/filter/filter_with_non_root_user_test.py::test_telephone[(telephonenumber=*7393)] 0.04
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/filter/filter_with_non_root_user_test.py::test_telephone[(telephonenumber=*408*3)] 0.02
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid=mward)] 0.04
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(l=sunnyvale)0] 0.03
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(mail=jreu*)] 0.01
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(mail=*exam*)] 0.01
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid=*)] 0.03
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:2.16.840.1.113730.3.3.2.15.1:=>AAA)] 0.03
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:es:=>AAA)] 0.04
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:2.16.840.1.113730.3.3.2.15.1.5:=AAA)] 0.04
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:2.16.840.1.113730.3.3.2.15.1:=>user100)] 0.03
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:es:=>user100)] 0.04
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:2.16.840.1.113730.3.3.2.15.1.5:=user100)] 0.03
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:2.16.840.1.113730.3.3.2.15.1.1:=user1)] 0.02
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:2.16.840.1.113730.3.3.2.15.1.1:=z)] 0.03
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid=user1)] 0.02
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid<=Z)] 0.03
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid>=1)] 0.03
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid>=A)] 0.03
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid>=user20)] 0.03
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:2.16.840.1.113730.3.3.2.18.1.2:=user20)] 0.02
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:2.16.840.1.113730.3.3.2.18.1.2:=z)] 0.03
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:2.16.840.1.113730.3.3.2.18.1:=>=A)] 0.29
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:fr:=>=A)] 0.03
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:2.16.840.1.113730.3.3.2.18.1.4:=A)] 0.03
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:2.16.840.1.113730.3.3.2.18.1:=>=user20)] 0.04
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:fr:=>=user20)] 0.03
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:2.16.840.1.113730.3.3.2.18.1.4:=user20)] 0.04
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:2.16.840.1.113730.3.3.2.18.1:=>=z)] 0.04
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:fr:=>=z)] 0.03
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(description=This is the special * attribute value)] 0.02
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(description=*x*)] 0.03
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid=ptyler)] 0.01
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid=*wal*)] 0.02
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(roomNumber=0312)] 0.02
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid=mw*)] 0.02
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(roomNumber=2295)] 0.02
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(l=Cupertino)] 0.02
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(objectclass=inetorgperson)] 0.03
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(l=sunnyvale)1] 0.03
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(roomNumber=200)] 0.02
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(roomNumber=201)] 0.02
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(roomNumber=202)] 0.02
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(l=*)] 0.03
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(!(sn~=tiller))(!(uid=ptyler)))] 0.03
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(!(sn~=tiller)) (uid=ptyler))] 0.01
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(uid=*wal*) (roomNumber=0312))] 0.02
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(!(uid=*wal*))(!(roomNumber=0312)))] 0.04
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(!(uid=*wal*))(roomNumber=0312))] 0.04
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(uid=*wal*)(!(roomNumber=0312)))] 0.03
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(uid=*wal*)(|(sn~=tiller) (roomNumber=2295)))] 0.02
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(&(uid=*wal*) (roomNumber=2295))(&(uid=*wal*) (sn~=tiller)))] 0.03
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(uid=*wal*)(&(sn~=tiller) (roomNumber=2295)))0] 0.03
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(|(uid=*wal*) (sn~=tiller))(|(uid=*wal*) (roomNumber=2295)))0] 0.02
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(uid=*wal*) (roomNumber=2295))0] 0.02
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(roomNumber=2295) (uid=*wal*))0] 0.02
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(roomNumber=2295) (uid=*wal*))0] 0.02
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(uid=*wal*)(&(sn~=tiller) (roomNumber=2295)))1] 0.04
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(uid=*wal*) (roomNumber=2295))] 0.03
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(uid=*wal*) (l=*))] 0.03
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(l=Cupertino) (|(uid=*wal*)(&(sn~=tiller) (roomNumber=2295))))] 0.27
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(!(l=Cupertino))(!(|(uid=*wal*)(&(sn~=tiller) (roomNumber=2295)))))] 0.03
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(!(l=Cupertino))(|(uid=*wal*)(&(sn~=tiller) (roomNumber=2295))))] 0.02
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(l=Cupertino)(!(|(uid=*wal*)(&(sn~=tiller) (roomNumber=2295)))))] 0.02
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(l=Cupertino) (|(uid=*wal*)(&(sn~=tiller) (roomNumber=2295))))] 0.03
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(!(l=Cupertino))(!(|(uid=*wal*)(&(sn~=tiller) (roomNumber=2295)))))] 0.04
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(!(l=Cupertino))(|(uid=*wal*)(&(sn~=tiller) (roomNumber=2295))))] 0.05
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(l=Cupertino)(!(|(uid=*wal*)(&(sn~=tiller) (roomNumber=2295)))))] 0.04
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(!(uid=user1))(objectclass=inetorgperson))] 0.03
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(!(uid=user1))(objectclass=inetorgperson))] 0.03
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(!(mail=cnewport@example.com))(l=sunnyvale))] 0.03
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(|(uid=*wal*) (sn~=tiller))(|(uid=*wal*) (roomNumber=2295)))1] 0.02
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(uid=*wal*) (roomNumber=2295))1] 0.02
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(roomNumber=2295) (uid=*wal*))1] 0.02
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(roomNumber=2295) (uid=*wal*))1] 0.02
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(!(roomNumber=200))(!(roomNumber=201))(!(roomNumber=202))(l=sunnyvale))] 0.03
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(!(uid=user40))(&(!(uid=user1))(!(uid=user20))(!(uid=user30))(objectclass=inetorgperson)))] 0.04
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(!(uid=user40))(&(!(uid=user1))(!(uid=user20))(!(uid=user30))(objectclass=inetorgperson)))] 0.04
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(!(roomNumber=2254))(&(!(roomNumber=200))(!(roomNumber=201))(!(roomNumber=202))(l=sunnyvale)))] 0.03
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(!(roomNumber=2254))(&(!(roomNumber=200))(!(roomNumber=201))(!(roomNumber=202))(l=sunnyvale)))] 0.05
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(!(uid=user1))(!(uid:2.16.840.1.113730.3.3.2.18.1:=<=user20))(!(uid=user30)))] 0.04
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(!(uid=user1))(!(uid:2.16.840.1.113730.3.3.2.18.1:=<=user20))(!(uid=user30)))] 0.03
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(!(roomNumber=4012))(!(roomNumber=3924))(!(roomNumber=4508)))] 0.04
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(!(roomNumber=4012))(!(roomNumber=3924))(!(roomNumber=4508)))] 0.04
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(& (objectclass=inetorgperson)(!(uid=user1))(!(uid:2.16.840.1.113730.3.3.2.18.1:=<=user20))(!(uid=user30)))] 0.04
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(| (objectclass=inetorgperson)(!(uid=user1))(!(uid:2.16.840.1.113730.3.3.2.18.1:=<=user20))(!(uid=user30)))] 0.03
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(l=sunnyvale)(!(roomNumber=4012))(!(roomNumber=3924))(!(roomNumber=4508)))] 0.03
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(l=sunnyvale)(!(roomNumber=4012))(!(roomNumber=3924))(!(roomNumber=4508)))] 0.03
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(!(|(!(l=*))(!(l=sunnyvale))))] 0.03
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(!(l=*))(!(l=sunnyvale)))] 0.02
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(!(l=Cupertino))(!(mail=*exam*))(!(|(uid=*wal*) (l=*))))] 0.02
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(!(l=Cupertino))(!(mail=*exam*))(|(uid=*wal*) (l=*)))] 0.03
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(!(l=Cupertino))(mail=*exam*) (|(uid=*wal*) (l=*)))] 0.02
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(l=Cupertino) (mail=*exam*) (|(uid=*wal*) (l=*)))] 0.04
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(!(l=Cupertino))(!(mail=*exam*))(!(|(uid=*wal*) (l=*))))] 0.28
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(!(l=Cupertino))(!(mail=*exam*))(|(uid=*wal*) (l=*)))] 0.04
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(!(l=Cupertino))(mail=*exam*)(!(|(uid=*wal*) (l=*))))] 0.03
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(!(l=Cupertino))(mail=*exam*) (|(uid=*wal*) (l=*)))] 0.04
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(l=Cupertino)(!(mail=*exam*))(!(|(uid=*wal*) (l=*))))] 0.03
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(l=Cupertino)(!(mail=*exam*))(|(uid=*wal*) (l=*)))] 0.03
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(l=Cupertino) (mail=*exam*)(!(|(uid=*wal*) (l=*))))] 0.03
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(l=Cupertino)(!(mail=*exam*))(|(uid=*wal*) (l=*)))] 0.02
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_negative[(userpassword=*)] 0.02
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_negative[(fred=*)] 0.01
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid:2.16.840.1.113730.3.3.2.15.1:=<1)] 0.02
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid:es:=<1)] 0.02
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid:2.16.840.1.113730.3.3.2.15.1.1:=1)] 0.02
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid:2.16.840.1.113730.3.3.2.15.1:=<user1)] 0.02
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid:es:=<user1)] 0.02
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid:2.16.840.1.113730.3.3.2.15.1:=<z)] 0.02
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid:es:=<z)] 0.02
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid<=1)] 0.12
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid<=A)] 0.01
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid>=Z)] 0.01
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid:2.16.840.1.113730.3.3.2.18.1:=<=A)] 0.02
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid:fr:=<=A)] 0.02
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid:2.16.840.1.113730.3.3.2.18.1.2:=A)] 0.02
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid:2.16.840.1.113730.3.3.2.18.1:=<=user20)] 0.03
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid:fr:=<=user20)] 0.02
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid:2.16.840.1.113730.3.3.2.18.1:=<=z)] 0.02
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid:fr:=<=z)] 0.02
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid:2.16.840.1.113730.3.3.2.18.1.4:=z)] 0.02
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_negative[(sn~=tiller)] 0.02
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_negative[(givenName~=pricella)] 0.02
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_negative[(mail=cnewport@example.com)] 0.01
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid=user20)] 0.01
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid=user30)] 0.02
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid=user40)] 0.01
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_negative[(&(sn~=tiller) (givenName~=pricella))] 0.02
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_negative[(&(sn~=tiller)(!(uid=ptyler)))] 0.02
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_negative[(&(!(l=Cupertino))(mail=*exam*)(!(|(uid=*wal*) (l=*))))] 0.02
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_negative[(&(l=Cupertino)(!(mail=*exam*))(!(|(uid=*wal*) (l=*))))] 0.02
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_negative[(&(l=Cupertino) (mail=*exam*)(!(|(uid=*wal*) (l=*))))] 0.01
No log output captured.
Passed suites/filter/filter_with_non_root_user_test.py::test_all_negative[(&(l=Cupertino) (mail=*exam*) (|(uid=*wal*) (l=*)))] 0.02
No log output captured.
Passed suites/filter/filterscanlimit_test.py::test_invalid_configuration 0.59
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/filter/filterscanlimit_test.py::test_idlistscanlimit 9.50
No log output captured.
Passed suites/filter/large_filter_test.py::test_large_filter[(&(objectClass=person)(|(manager=uid=fmcdonnagh,dc=anuj,dc=com)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_0,dc=anuj,dc=com)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_1,dc=anuj,dc=com)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_2,dc=anuj,dc=com)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_3,dc=anuj,dc=com)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_4,dc=anuj,dc=com)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_5,dc=anuj,dc=com)(manager=uid=jvedder, dc=anuj, dc=com)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_6,dc=anuj,dc=com)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_7,dc=anuj,dc=com)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_8,dc=anuj,dc=com)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_9,dc=anuj,dc=com)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_10,dc=anuj,dc=com)(manager=uid=cnewport, dc=anuj, dc=com)))] 0.05
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/filter/large_filter_test.py::test_large_filter[(&(objectClass=person)(|(manager=uid=fmcdonnagh *)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_0,*)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_1,*)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_2,*)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_3,*)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_4,*)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_5,*)(manager=uid=jvedder,*)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_6,*)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_7,*)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_8,*)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_9,*)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_10,*)(manager=uid=cnewport,*)))] 0.02
No log output captured.
Passed suites/filter/rfc3673_all_oper_attrs_test.py::test_supported_features 0.00
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[-False-oper_attr_list0-] 0.19
-------------------------------Captured log call--------------------------------
INFO  lib389.utils:rfc3673_all_oper_attrs_test.py:161 bound as: cn=Directory Manager
Passed suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[-False-oper_attr_list0-*] 0.01
-------------------------------Captured log call--------------------------------
INFO  lib389.utils:rfc3673_all_oper_attrs_test.py:161 bound as: cn=Directory Manager
Passed suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[-False-oper_attr_list0-objectClass] 0.01
-------------------------------Captured log call--------------------------------
INFO  lib389.utils:rfc3673_all_oper_attrs_test.py:161 bound as: cn=Directory Manager
Passed suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[-True-oper_attr_list1-] 0.03
-------------------------------Captured log call--------------------------------
INFO  lib389.utils:rfc3673_all_oper_attrs_test.py:158 bound as: uid=all_attrs_test,ou=people,dc=example,dc=com
Passed suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[-True-oper_attr_list1-*] 0.01
-------------------------------Captured log call--------------------------------
INFO  lib389.utils:rfc3673_all_oper_attrs_test.py:158 bound as: uid=all_attrs_test,ou=people,dc=example,dc=com
Passed suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[-True-oper_attr_list1-objectClass] 0.01
-------------------------------Captured log call--------------------------------
INFO  lib389.utils:rfc3673_all_oper_attrs_test.py:158 bound as: uid=all_attrs_test,ou=people,dc=example,dc=com
Passed suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[ou=people,dc=example,dc=com-False-oper_attr_list2-] 0.03
-------------------------------Captured log call--------------------------------
INFO  lib389.utils:rfc3673_all_oper_attrs_test.py:161 bound as: cn=Directory Manager
Passed suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[ou=people,dc=example,dc=com-False-oper_attr_list2-*] 0.01
-------------------------------Captured log call--------------------------------
INFO  lib389.utils:rfc3673_all_oper_attrs_test.py:161 bound as: cn=Directory Manager
Passed suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[ou=people,dc=example,dc=com-False-oper_attr_list2-objectClass] 0.01
-------------------------------Captured log call--------------------------------
INFO  lib389.utils:rfc3673_all_oper_attrs_test.py:161 bound as: cn=Directory Manager
Passed suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[ou=people,dc=example,dc=com-True-oper_attr_list3-] 0.03
-------------------------------Captured log call--------------------------------
INFO  lib389.utils:rfc3673_all_oper_attrs_test.py:158 bound as: uid=all_attrs_test,ou=people,dc=example,dc=com
Passed suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[ou=people,dc=example,dc=com-True-oper_attr_list3-*] 0.01
-------------------------------Captured log call--------------------------------
INFO  lib389.utils:rfc3673_all_oper_attrs_test.py:158 bound as: uid=all_attrs_test,ou=people,dc=example,dc=com
Passed suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[ou=people,dc=example,dc=com-True-oper_attr_list3-objectClass] 0.01
-------------------------------Captured log call--------------------------------
INFO  lib389.utils:rfc3673_all_oper_attrs_test.py:158 bound as: uid=all_attrs_test,ou=people,dc=example,dc=com
Passed suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[uid=all_attrs_test,ou=people,dc=example,dc=com-False-oper_attr_list4-] 0.03
-------------------------------Captured log call--------------------------------
INFO  lib389.utils:rfc3673_all_oper_attrs_test.py:161 bound as: cn=Directory Manager
Passed suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[uid=all_attrs_test,ou=people,dc=example,dc=com-False-oper_attr_list4-*] 0.01
-------------------------------Captured log call--------------------------------
INFO  lib389.utils:rfc3673_all_oper_attrs_test.py:161 bound as: cn=Directory Manager
Passed suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[uid=all_attrs_test,ou=people,dc=example,dc=com-False-oper_attr_list4-objectClass] 0.01
-------------------------------Captured log call--------------------------------
INFO  lib389.utils:rfc3673_all_oper_attrs_test.py:161 bound as: cn=Directory Manager
Passed suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[uid=all_attrs_test,ou=people,dc=example,dc=com-True-oper_attr_list5-] 0.04
-------------------------------Captured log call--------------------------------
INFO  lib389.utils:rfc3673_all_oper_attrs_test.py:158 bound as: uid=all_attrs_test,ou=people,dc=example,dc=com
Passed suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[uid=all_attrs_test,ou=people,dc=example,dc=com-True-oper_attr_list5-*] 0.01
-------------------------------Captured log call--------------------------------
INFO  lib389.utils:rfc3673_all_oper_attrs_test.py:158 bound as: uid=all_attrs_test,ou=people,dc=example,dc=com
Passed suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[uid=all_attrs_test,ou=people,dc=example,dc=com-True-oper_attr_list5-objectClass] 0.01
-------------------------------Captured log call--------------------------------
INFO  lib389.utils:rfc3673_all_oper_attrs_test.py:158 bound as: uid=all_attrs_test,ou=people,dc=example,dc=com
Passed suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[cn=config-False-oper_attr_list6-] 0.03
-------------------------------Captured log call--------------------------------
INFO  lib389.utils:rfc3673_all_oper_attrs_test.py:161 bound as: cn=Directory Manager
Passed suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[cn=config-False-oper_attr_list6-*] 0.01
-------------------------------Captured log call--------------------------------
INFO  lib389.utils:rfc3673_all_oper_attrs_test.py:161 bound as: cn=Directory Manager
Passed suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[cn=config-False-oper_attr_list6-objectClass] 0.01
-------------------------------Captured log call--------------------------------
INFO  lib389.utils:rfc3673_all_oper_attrs_test.py:161 bound as: cn=Directory Manager
Passed suites/filter/schema_validation_test.py::test_filter_validation_config 0.21
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/filter/schema_validation_test.py::test_filter_validation_enabled 4.15
No log output captured.
Passed suites/filter/schema_validation_test.py::test_filter_validation_warn_safe 0.34
No log output captured.
Passed suites/filter/schema_validation_test.py::test_filter_validation_warn_unsafe 0.14
No log output captured.
Passed suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (| (nsRoleDN=cn=new managed role) (sn=Hall)) (l=sunnyvale))-condition0-cn] 0.01
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (| (nsRoleDN=cn=new managed role) (sn=Hall)) (l=sunnyvale))-condition1-cn] 0.01
No log output captured.
Passed suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (| (nsRoleDN=cn=new managed role) (sn=Hall)) (l=sunnyvale))-condition2-cn] 0.01
No log output captured.
Passed suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (| (nsRoleDN=cn=new managed role) (sn=Hall)) (l=sunnyvale))-condition3-cn] 0.01
No log output captured.
Passed suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (| (nsRoleDN=cn=new managed role) (sn=Hall)) (l=sunnyvale))-condition4-modifiersName] 0.01
No log output captured.
Passed suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (| (nsRoleDN=cn=new managed role) (sn=Hall)) (l=sunnyvale))-condition5-modifyTimestamp] 0.01
No log output captured.
Passed suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (| (nsRoleDN=cn=new managed role) (sn=Hall)) (l=sunnyvale))-condition6-modifiersName] 0.00
No log output captured.
Passed suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (| (nsRoleDN=cn=new managed role) (sn=Hall)) (l=sunnyvale))-condition7-modifyTimestamp] 0.01
No log output captured.
Passed suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (| (nsRoleDN=cn=new managed role) (sn=Hall)) (l=sunnyvale))-condition8-modifiersName] 0.01
No log output captured.
Passed suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (| (nsRoleDN=cn=new managed role) (sn=Hall)) (l=sunnyvale))-condition9-modifyTimestamp] 0.01
No log output captured.
Passed suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (| (nsRoleDN=cn=new managed role) (sn=Hall)) (l=sunnyvale))-condition10-cn] 0.01
No log output captured.
Passed suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (| (nsRoleDN=cn=new managed role) (sn=Hall)) (l=sunnyvale))-condition11-cn] 0.00
No log output captured.
Passed suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (| (nsRoleDN=cn=new managed role) (sn=Hall)) (l=sunnyvale))-condition12-modifiersName] 0.01
No log output captured.
Passed suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (| (nsRoleDN=cn=new managed role) (sn=Hall)) (l=sunnyvale))-condition13-nsRoleDN] 0.01
No log output captured.
Passed suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (| (nsRoleDN=cn=new managed role) (sn=Hall)) (l=sunnyvale))-condition14-cn] 0.01
No log output captured.
Passed suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (| (nsRoleDN=cn=new managed role) (sn=Hall)) (l=sunnyvale))-condition15-modifiersName] 0.01
No log output captured.
Passed suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (| (nsRoleDN=cn=new managed role) (sn=Hall)) (l=sunnyvale))-condition16-nsRoleDN] 0.01
No log output captured.
Passed suites/filter/vfilter_attribute_test.py::test_all_together_positive[(uid=rjense2)-condition17-mailquota] 0.00
No log output captured.
Passed suites/filter/vfilter_attribute_test.py::test_all_together_positive[(uid=rjense2)-condition18-mailquota] 0.00
No log output captured.
Passed suites/filter/vfilter_attribute_test.py::test_all_together_positive[(uid=rjense2)-condition19-mailquota] 0.00
No log output captured.
Passed suites/filter/vfilter_attribute_test.py::test_all_together_positive[(uid=rjense2)-condition20-mailquota] 0.01
No log output captured.
Passed suites/filter/vfilter_attribute_test.py::test_all_together_positive[(uid=rjense2)-condition21-nsRoleDN] 0.00
No log output captured.
Passed suites/filter/vfilter_attribute_test.py::test_all_together_positive[(nsRoleDN=cn=new managed *)-condition22-cn] 0.01
No log output captured.
Passed suites/filter/vfilter_attribute_test.py::test_all_together_positive[(nsRoleDN=cn=new managed *)-condition23-nsRoleDN] 0.01
No log output captured.
Passed suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (nsRoleDN=cn=new managed *) (uid=mtyler))-condition24-mailquota] 0.00
No log output captured.
Passed suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (nsRoleDN=cn=new managed *) (uid=mtyler))-condition25-nsRoleDN] 0.00
No log output captured.
Passed suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (nsRoleDN=cn=new managed *) (uid=mtyler))-condition26-mailquota] 0.00
No log output captured.
Passed suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (nsRoleDN=cn=new managed *) (uid=mtyler))-condition27-modifiersName] 0.01
No log output captured.
Passed suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (nsRoleDN=cn=new managed *) (uid=mtyler))-condition28-nsRoleDN] 0.00
No log output captured.
Passed suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (nsRoleDN=cn=new managed *) (uid=mtyler))-condition29-nsRoleDN] 0.00
No log output captured.
Passed suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (nsRoleDN=cn=new managed *) (uid=mtyler))-condition30-modifiersName] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(postalCode=99999)] 0.01
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(postalAddress=345 California Av., Mountain View, CA)] 0.00
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(postalCode:2.16.840.1.113730.3.3.2.7.1:=88888)] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(postalCode:2.16.840.1.113730.3.3.2.7.1.3:=66666)] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(emailclass=vpe*)] 0.00
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(emailclass=*emai*)] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(mailquota=*00)] 0.00
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(mailquota=*6*0)] 0.00
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(nsRole=*)] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(postalAddress=*)] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(emailclass:2.16.840.1.113730.3.3.2.15.1:=>AAA)] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(emailclass:es:=>AAA)] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(emailclass:2.16.840.1.113730.3.3.2.15.1.5:=AAA)] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(emailclass:2.16.840.1.113730.3.3.2.15.1:=>vpemail)] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(emailclass:es:=>vpemail)] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(mailquota:2.16.840.1.113730.3.3.2.15.1.1:=900)] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(mailquota<=600)] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(mailquota>=600)] 0.00
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(nsRole~=cn=new)] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(uid=*wal*)] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(uid=mw*)] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(roomNumber=0312)] 0.00
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(l=Cupertino)] 0.00
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(uid=user1)] 0.00
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(objectclass=inetorgperson)] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(l=sunnyvale)] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(roomNumber=3924)] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(l=*)] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(objectclass=*)] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(mailquota<=900)] 0.15
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(mailquota>=100)] 0.00
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(mailquota:2.16.840.1.113730.3.3.2.18.1.2:=600)] 0.00
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(mailquota:2.16.840.1.113730.3.3.2.18.1.2:=900)] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(mailquota:2.16.840.1.113730.3.3.2.18.1:=>=900)] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(mailquota:fr:=>=900)] 0.00
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(mailquota:2.16.840.1.113730.3.3.2.18.1:=>=600)] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(mailquota:fr:=>=600)] 0.00
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(mailquota:2.16.840.1.113730.3.3.2.18.1.4:=600)] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(mailquota:2.16.840.1.113730.3.3.2.18.1:=>=100)] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(mailquota:fr:=>=100)] 0.17
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(mailquota:2.16.840.1.113730.3.3.2.18.1.4:=100)] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(&(!(nsRole~=cn=new managed))(!(nsRole=cn=new vaddr filtered role,dc=example,dc=com)))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(|(uid=*wal*) (nsRole=cn=*another*))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(|(!(uid=*wal*))(!(nsRole=cn=*another*)))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(|(!(uid=*wal*))(nsRole=cn=*another*))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(|(uid=*wal*)(!(nsRole=cn=*another*)))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(&(uid=*wal*)(|(nsRole~=cn=new managed) (l=Cupertino)))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(|(&(uid=*wal*) (l=Cupertino))(&(uid=*wal*) (nsRole~=cn=new managed)))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(|(uid=*wal*)(&(nsRole~=cn=new managed) (l=Cupertino)))] 0.00
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(&(|(uid=*wal*) (nsRole~=cn=new managed))(|(uid=*wal*) (l=Cupertino)))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(|(nsRole=cn=*vaddr*) (uid=*wal*))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(|(uid=*wal*)(&(nsRole~=cn=new managed) (nsRole=cn=*vaddr*)))] 0.00
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(|(uid=*wal*) (nsRole=cn=*vaddr*))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(|(nsRole=cn=*vaddr*) (l=*))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(&(l=Cupertino) (|(uid=*wal*)(&(nsRole~=cn=new managed) (nsRole=cn=*vaddr*))))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(&(!(l=Cupertino))(!(|(uid=*wal*)(&(nsRole~=cn=new managed) (nsRole=cn=*vaddr*)))))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(&(!(l=Cupertino))(|(uid=*wal*)(&(nsRole~=cn=new managed) (nsRole=cn=*vaddr*))))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(&(l=Cupertino)(!(|(uid=*wal*)(&(nsRole~=cn=new managed) (nsRole=cn=*vaddr*)))))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(|(l=Cupertino) (|(uid=*wal*)(&(nsRole~=cn=new managed) (nsRole=cn=*vaddr*))))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(|(!(l=Cupertino))(!(|(uid=*wal*)(&(nsRole~=cn=new managed) (nsRole=cn=*vaddr*)))))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(|(!(l=Cupertino))(|(uid=*wal*)(&(nsRole~=cn=new managed) (nsRole=cn=*vaddr*))))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(|(l=Cupertino)(!(|(uid=*wal*)(&(nsRole~=cn=new managed) (nsRole=cn=*vaddr*)))))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(&(!(uid=user1))(objectclass=inetorgperson))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(|(!(uid=user1))(objectclass=inetorgperson))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(&(!(nsRole=cn=*vaddr*))(l=sunnyvale))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(|(!(nsRole=cn=*vaddr*))(l=sunnyvale))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(&(!(uid=user1))(!(uid=user20))(!(uid=user30))(objectclass=inetorgperson))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(|(!(uid=user1))(!(uid=user20))(!(uid=user30))(objectclass=inetorgperson))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(&(!(nsRole=cn=another vaddr role,dc=example,dc=com))(!(roomNumber=3924))(!(roomNumber=4508))(l=sunnyvale))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(|(!(nsRole=cn=another vaddr role,dc=example,dc=com))(!(roomNumber=3924))(!(roomNumber=4508))(l=sunnyvale))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(&(!(uid=user40))(&(!(uid=user1))(!(uid=user20))(!(uid=user30))(objectclass=inetorgperson)))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(|(!(uid=user40))(&(!(uid=user1))(!(uid=user20))(!(uid=user30))(objectclass=inetorgperson)))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(&(!(roomNumber=2254))(&(!(nsRole=cn=another vaddr role,dc=example,dc=com))(!(roomNumber=3924))(!(roomNumber=4508))(l=sunnyvale)))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(|(!(roomNumber=2254))(&(!(nsRole=cn=another vaddr role,dc=example,dc=com))(!(roomNumber=3924))(!(roomNumber=4508))(l=sunnyvale)))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(&(!(uid=user1))(!(uid=user20))(!(uid=user30)))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(|(!(uid=user1))(!(uid=user20))(!(uid=user30)))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(&(!(nsRole=cn=another vaddr role,dc=example,dc=com))(!(roomNumber=3924))(!(roomNumber=4508)))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(|(!(nsRole=cn=another vaddr role,dc=example,dc=com))(!(roomNumber=3924))(!(roomNumber=4508)))] 0.14
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(& (objectclass=inetorgperson)(!(uid=user1))(!(uid=user20))(!(uid=user30)))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(| (objectclass=inetorgperson)(!(uid=user1))(!(uid=user20))(!(uid=user30)))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(&(l=sunnyvale)(!(nsRole=cn=another vaddr role,dc=example,dc=com))(!(roomNumber=3924))(!(roomNumber=4508)))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(|(l=sunnyvale)(!(nsRole=cn=another vaddr role,dc=example,dc=com))(!(roomNumber=3924))(!(roomNumber=4508)))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(!(|(!(l=*))(!(l=sunnyvale))))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(|(!(l=*))(!(l=sunnyvale)))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(&(l=Cupertino) (emailclass=*emai*) (|(nsRole=cn=*vaddr*) (l=*)))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(&(!(l=Cupertino))(!(emailclass=*emai*))(!(|(nsRole=cn=*vaddr*) (l=*))))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(&(!(l=Cupertino))(!(emailclass=*emai*))(|(nsRole=cn=*vaddr*) (l=*)))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(&(!(l=Cupertino))(emailclass=*emai*) (|(nsRole=cn=*vaddr*) (l=*)))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(&(l=Cupertino)(!(emailclass=*emai*))(|(nsRole=cn=*vaddr*) (l=*)))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(|(l=Cupertino) (emailclass=*emai*) (|(nsRole=cn=*vaddr*) (l=*)))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(|(!(l=Cupertino))(!(emailclass=*emai*))(!(|(nsRole=cn=*vaddr*) (l=*))))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(|(!(l=Cupertino))(!(emailclass=*emai*))(|(nsRole=cn=*vaddr*) (l=*)))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(|(!(l=Cupertino))(emailclass=*emai*)(!(|(nsRole=cn=*vaddr*) (l=*))))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(|(!(l=Cupertino))(emailclass=*emai*) (|(nsRole=cn=*vaddr*) (l=*)))] 0.02
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(|(l=Cupertino)(!(emailclass=*emai*))(!(|(nsRole=cn=*vaddr*) (l=*))))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(|(l=Cupertino)(!(emailclass=*emai*))(|(nsRole=cn=*vaddr*) (l=*)))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_positive[(|(l=Cupertino) (emailclass=*emai*)(!(|(nsRole=cn=*vaddr*) (l=*))))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_negative[(postalCode:de:==77777)] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_negative[(fred=*)] 0.00
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_negative[(emailclass:2.16.840.1.113730.3.3.2.15.1.5:=vpemail)] 0.00
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_negative[(emailclass:2.16.840.1.113730.3.3.2.15.1:=<1)] 0.00
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_negative[(emailclass:es:=<1)] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_negative[(emailclass:2.16.840.1.113730.3.3.2.15.1.1:=1)] 0.00
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_negative[(emailclass:2.16.840.1.113730.3.3.2.15.1:=<vpemail)] 0.00
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_negative[(emailclass:es:=<vpemail)] 0.00
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_negative[(emailclass:2.16.840.1.113730.3.3.2.15.1.1:=vpemail)] 0.00
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_negative[(mailquota:2.16.840.1.113730.3.3.2.15.1:=<900)] 0.00
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_negative[(mailquota:es:=<900)] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_negative[(mailquota<=100)] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_negative[(mailquota>=900)] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_negative[(nsRole~=cn=new managed)] 0.00
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_negative[(nsRole=cn=new vaddr filtered role,dc=example,dc=com)] 0.00
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_negative[(nsRole=cn=*another*)] 0.00
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_negative[(nsRole=cn=*vaddr*)] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_negative[(uid=user20)] 0.00
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_negative[(uid=user30)] 0.00
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_negative[(nsRole=cn=another vaddr role,dc=example,dc=com)] 0.00
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_negative[(roomNumber=4508)] 0.00
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_negative[(uid=user40)] 0.00
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_negative[(roomNumber=2254)] 0.00
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_negative[(mailquota:2.16.840.1.113730.3.3.2.18.1:=<=100)] 0.00
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_negative[(mailquota:fr:=<=100)] 0.00
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_negative[(mailquota:2.16.840.1.113730.3.3.2.18.1.2:=100)] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_negative[(mailquota:2.16.840.1.113730.3.3.2.18.1:=<=600)] 0.00
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_negative[(mailquota:fr:=<=600)] 0.00
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_negative[(mailquota:2.16.840.1.113730.3.3.2.18.1:=<=900)] 0.00
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_negative[(mailquota:fr:=<=900)] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_negative[(mailquota:2.16.840.1.113730.3.3.2.18.1.4:=900)] 0.00
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_negative[(Description=This is the special \2a attribute value)] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_negative[(Description=*\2a*)] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_negative[(&(nsRole~=cn=new managed) (nsRole=cn=new vaddr filtered role,dc=example,dc=com))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_negative[(&(!(nsRole~=cn=new managed)) (nsRole=cn=new vaddr filtered role,dc=example,dc=com))] 0.00
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_negative[(&(nsRole~=cn=new managed)(!(nsRole=cn=new vaddr filtered role,dc=example,dc=com)))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_negative[(&(uid=*wal*) (nsRole=cn=*vaddr*))] 0.00
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_negative[(&(nsRole=cn=*vaddr*) (uid=*wal*))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_negative[(&(!(l=Cupertino))(emailclass=*emai*)(!(|(nsRole=cn=*vaddr*) (l=*))))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_negative[(&(l=Cupertino)(!(emailclass=*emai*))(!(|(nsRole=cn=*vaddr*) (l=*))))] 0.01
No log output captured.
Passed suites/filter/vfilter_simple_test.py::test_param_negative[(&(l=Cupertino) (emailclass=*emai*)(!(|(nsRole=cn=*vaddr*) (l=*))))] 0.00
No log output captured.
Passed suites/fourwaymmr/fourwaymmr_test.py::test_verify_trees 3.06
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master3 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39003, 'ldap-secureport': 63703, 'server-id': 'master3', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master4 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39004, 'ldap-secureport': 63704, 'server-id': 'master4', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... 
INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect a334672f-ccc3-4563-b3c7-459e460b71bd / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 22584e93-6bfe-4cd3-b876-b1d2a0abaf48 / got description=a334672f-ccc3-4563-b3c7-459e460b71bd) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:156 Joining master master3 to master1 ... 
INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect d5634179-3872-4c6d-9e1c-9e27d7dafd64 / got description=22584e93-6bfe-4cd3-b876-b1d2a0abaf48) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect d277ea4d-02e2-410d-bd79-ce689bab7dc1 / got description=d5634179-3872-4c6d-9e1c-9e27d7dafd64) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 INFO  lib389.topologies:topologies.py:156 Joining master master4 to master1 ... 
INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 8d95c1bb-5f3b-4989-8b1f-ccc8791bb0c9 / got description=d277ea4d-02e2-410d-bd79-ce689bab7dc1) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 985b0453-ad9e-4f6d-b89f-b2a242240339 / got description=8d95c1bb-5f3b-4989-8b1f-ccc8791bb0c9) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... 
INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master3 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master4 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master3 ... INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is was created INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master4 ... INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is was created INFO  lib389.topologies:topologies.py:164 Ensuring master master3 to master1 ... 
INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master3 to master2 ... INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.topologies:topologies.py:164 Ensuring master master3 to master4 ... INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is was created INFO  lib389.topologies:topologies.py:164 Ensuring master master4 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master4 to master2 ... INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.topologies:topologies.py:164 Ensuring master master4 to master3 ... INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is was created
-------------------------------Captured log call--------------------------------
INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect b12a5fa8-1c68-463b-8d67-c70b01c94a33 / got description=985b0453-ad9e-4f6d-b89f-b2a242240339) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect f0b6cce1-77e6-46e9-a1b2-23b4c89809fe / got description=b12a5fa8-1c68-463b-8d67-c70b01c94a33) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect f5d03df4-9c66-411f-a8c5-d7a0e5460488 / got description=f0b6cce1-77e6-46e9-a1b2-23b4c89809fe) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is working
Passed suites/fourwaymmr/fourwaymmr_test.py::test_sync_through_to_all_4_masters 3.05
-------------------------------Captured log call--------------------------------
INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 1d14a1a1-1573-4b7f-94c2-33acab264447 / got description=f5d03df4-9c66-411f-a8c5-d7a0e5460488) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 8f8b1838-ba64-4cb0-85bc-774d648fc795 / got description=1d14a1a1-1573-4b7f-94c2-33acab264447) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect a7c4593b-358b-4000-b430-4d5c999e7074 / got description=8f8b1838-ba64-4cb0-85bc-774d648fc795) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is working
Passed suites/fourwaymmr/fourwaymmr_test.py::test_modify_some_data_in_m3 8.20
-------------------------------Captured log call--------------------------------
INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 7a3158d9-c316-4c3a-bd02-25517452b1ae / got description=a7c4593b-358b-4000-b430-4d5c999e7074) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 48c7a595-ed72-4f14-b00a-e92f561e4b2d / got description=7a3158d9-c316-4c3a-bd02-25517452b1ae) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect afa44aee-78ea-400c-87a5-ff9249f454f1 / got description=48c7a595-ed72-4f14-b00a-e92f561e4b2d) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is working
Passed suites/fourwaymmr/fourwaymmr_test.py::test_delete_a_few_entries_in_m4 5.14
-------------------------------Captured log call--------------------------------
INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 174a40ac-6457-4efd-938b-c0df443676ae / got description=afa44aee-78ea-400c-87a5-ff9249f454f1) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 174a40ac-6457-4efd-938b-c0df443676ae / got description=afa44aee-78ea-400c-87a5-ff9249f454f1) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 82d155c7-1fb7-40cc-b169-02b50915bda4 / got description=174a40ac-6457-4efd-938b-c0df443676ae) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 54cc72c6-a7b9-418c-a259-72d0a51f216f / got description=82d155c7-1fb7-40cc-b169-02b50915bda4) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to 
ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 8597e116-88c1-4ef9-95db-65806b0d05ef / got description=54cc72c6-a7b9-418c-a259-72d0a51f216f) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is working
Passed suites/fourwaymmr/fourwaymmr_test.py::test_replicated_multivalued_entries 1.13
-------------------------------Captured log call--------------------------------
INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect d8683ef5-3ef8-4ac1-b900-b3e27d6af8cc / got description=8597e116-88c1-4ef9-95db-65806b0d05ef) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working
Passed suites/fourwaymmr/fourwaymmr_test.py::test_bad_replication_agreement 22.14
No log output captured.
Passed suites/fourwaymmr/fourwaymmr_test.py::test_nsds5replicaenabled_verify 68.41
-------------------------------Captured log call--------------------------------
INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect ef43f811-6be0-48f8-a340-fa3f00ad66a9 / got description=d8683ef5-3ef8-4ac1-b900-b3e27d6af8cc) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect ef43f811-6be0-48f8-a340-fa3f00ad66a9 / got description=d8683ef5-3ef8-4ac1-b900-b3e27d6af8cc) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect ef43f811-6be0-48f8-a340-fa3f00ad66a9 / got description=d8683ef5-3ef8-4ac1-b900-b3e27d6af8cc) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect ef43f811-6be0-48f8-a340-fa3f00ad66a9 / got description=d8683ef5-3ef8-4ac1-b900-b3e27d6af8cc) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect ef43f811-6be0-48f8-a340-fa3f00ad66a9 / got description=d8683ef5-3ef8-4ac1-b900-b3e27d6af8cc) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect ef43f811-6be0-48f8-a340-fa3f00ad66a9 / got description=d8683ef5-3ef8-4ac1-b900-b3e27d6af8cc) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to 
ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect ef43f811-6be0-48f8-a340-fa3f00ad66a9 / got description=d8683ef5-3ef8-4ac1-b900-b3e27d6af8cc) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect ef43f811-6be0-48f8-a340-fa3f00ad66a9 / got description=d8683ef5-3ef8-4ac1-b900-b3e27d6af8cc) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect ef43f811-6be0-48f8-a340-fa3f00ad66a9 / got description=d8683ef5-3ef8-4ac1-b900-b3e27d6af8cc) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect ef43f811-6be0-48f8-a340-fa3f00ad66a9 / got description=d8683ef5-3ef8-4ac1-b900-b3e27d6af8cc) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect ef43f811-6be0-48f8-a340-fa3f00ad66a9 / got description=d8683ef5-3ef8-4ac1-b900-b3e27d6af8cc) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect ef43f811-6be0-48f8-a340-fa3f00ad66a9 / got description=d8683ef5-3ef8-4ac1-b900-b3e27d6af8cc) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect ef43f811-6be0-48f8-a340-fa3f00ad66a9 / got 
description=d8683ef5-3ef8-4ac1-b900-b3e27d6af8cc) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect ef43f811-6be0-48f8-a340-fa3f00ad66a9 / got description=d8683ef5-3ef8-4ac1-b900-b3e27d6af8cc) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect ef43f811-6be0-48f8-a340-fa3f00ad66a9 / got description=d8683ef5-3ef8-4ac1-b900-b3e27d6af8cc) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect ef43f811-6be0-48f8-a340-fa3f00ad66a9 / got description=d8683ef5-3ef8-4ac1-b900-b3e27d6af8cc) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect ef43f811-6be0-48f8-a340-fa3f00ad66a9 / got description=d8683ef5-3ef8-4ac1-b900-b3e27d6af8cc) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect ef43f811-6be0-48f8-a340-fa3f00ad66a9 / got description=d8683ef5-3ef8-4ac1-b900-b3e27d6af8cc) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect ef43f811-6be0-48f8-a340-fa3f00ad66a9 / got description=d8683ef5-3ef8-4ac1-b900-b3e27d6af8cc) INFO  lib389.replica:replica.py:2498 Retry: Replication from 
ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect ef43f811-6be0-48f8-a340-fa3f00ad66a9 / got description=d8683ef5-3ef8-4ac1-b900-b3e27d6af8cc) INFO  lib389.replica:replica.py:2500 FAIL: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect ef43f811-6be0-48f8-a340-fa3f00ad66a9 / got description=d8683ef5-3ef8-4ac1-b900-b3e27d6af8cc) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect d16f65a3-bc57-4563-855a-ab00184c3403 / got description=ef43f811-6be0-48f8-a340-fa3f00ad66a9) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 3d7dde09-776a-411f-a104-e8a82647145a / got description=d16f65a3-bc57-4563-855a-ab00184c3403) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 3d7dde09-776a-411f-a104-e8a82647145a / got description=d16f65a3-bc57-4563-855a-ab00184c3403) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 3d7dde09-776a-411f-a104-e8a82647145a / got description=d16f65a3-bc57-4563-855a-ab00184c3403) INFO  
lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 3d7dde09-776a-411f-a104-e8a82647145a / got description=d16f65a3-bc57-4563-855a-ab00184c3403) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 3d7dde09-776a-411f-a104-e8a82647145a / got description=d16f65a3-bc57-4563-855a-ab00184c3403) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 3d7dde09-776a-411f-a104-e8a82647145a / got description=d16f65a3-bc57-4563-855a-ab00184c3403) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 3d7dde09-776a-411f-a104-e8a82647145a / got description=d16f65a3-bc57-4563-855a-ab00184c3403) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 3d7dde09-776a-411f-a104-e8a82647145a / got description=d16f65a3-bc57-4563-855a-ab00184c3403) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 3d7dde09-776a-411f-a104-e8a82647145a / got description=d16f65a3-bc57-4563-855a-ab00184c3403) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to 
ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 3d7dde09-776a-411f-a104-e8a82647145a / got description=d16f65a3-bc57-4563-855a-ab00184c3403) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 3d7dde09-776a-411f-a104-e8a82647145a / got description=d16f65a3-bc57-4563-855a-ab00184c3403) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 3d7dde09-776a-411f-a104-e8a82647145a / got description=d16f65a3-bc57-4563-855a-ab00184c3403) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 3d7dde09-776a-411f-a104-e8a82647145a / got description=d16f65a3-bc57-4563-855a-ab00184c3403) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 3d7dde09-776a-411f-a104-e8a82647145a / got description=d16f65a3-bc57-4563-855a-ab00184c3403) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 3d7dde09-776a-411f-a104-e8a82647145a / got description=d16f65a3-bc57-4563-855a-ab00184c3403) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 3d7dde09-776a-411f-a104-e8a82647145a / got 
description=d16f65a3-bc57-4563-855a-ab00184c3403) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 3d7dde09-776a-411f-a104-e8a82647145a / got description=d16f65a3-bc57-4563-855a-ab00184c3403) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 3d7dde09-776a-411f-a104-e8a82647145a / got description=d16f65a3-bc57-4563-855a-ab00184c3403) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 3d7dde09-776a-411f-a104-e8a82647145a / got description=d16f65a3-bc57-4563-855a-ab00184c3403) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 3d7dde09-776a-411f-a104-e8a82647145a / got description=d16f65a3-bc57-4563-855a-ab00184c3403) INFO  lib389.replica:replica.py:2500 FAIL: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 3d7dde09-776a-411f-a104-e8a82647145a / got description=d16f65a3-bc57-4563-855a-ab00184c3403) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 51e6a894-0d74-476f-aa09-9754fb813f38 / got description=d16f65a3-bc57-4563-855a-ab00184c3403) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from 
ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 5dcdf62d-ef69-407a-add7-02bab943137e / got description=51e6a894-0d74-476f-aa09-9754fb813f38) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 5dcdf62d-ef69-407a-add7-02bab943137e / got description=51e6a894-0d74-476f-aa09-9754fb813f38) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 5dcdf62d-ef69-407a-add7-02bab943137e / got description=51e6a894-0d74-476f-aa09-9754fb813f38) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 5dcdf62d-ef69-407a-add7-02bab943137e / got description=51e6a894-0d74-476f-aa09-9754fb813f38) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 5dcdf62d-ef69-407a-add7-02bab943137e / got description=51e6a894-0d74-476f-aa09-9754fb813f38) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 5dcdf62d-ef69-407a-add7-02bab943137e / got description=51e6a894-0d74-476f-aa09-9754fb813f38) INFO  
lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 5dcdf62d-ef69-407a-add7-02bab943137e / got description=51e6a894-0d74-476f-aa09-9754fb813f38) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 5dcdf62d-ef69-407a-add7-02bab943137e / got description=51e6a894-0d74-476f-aa09-9754fb813f38) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 5dcdf62d-ef69-407a-add7-02bab943137e / got description=51e6a894-0d74-476f-aa09-9754fb813f38) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 5dcdf62d-ef69-407a-add7-02bab943137e / got description=51e6a894-0d74-476f-aa09-9754fb813f38) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 5dcdf62d-ef69-407a-add7-02bab943137e / got description=51e6a894-0d74-476f-aa09-9754fb813f38) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 5dcdf62d-ef69-407a-add7-02bab943137e / got description=51e6a894-0d74-476f-aa09-9754fb813f38) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to 
ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 5dcdf62d-ef69-407a-add7-02bab943137e / got description=51e6a894-0d74-476f-aa09-9754fb813f38) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 5dcdf62d-ef69-407a-add7-02bab943137e / got description=51e6a894-0d74-476f-aa09-9754fb813f38) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 5dcdf62d-ef69-407a-add7-02bab943137e / got description=51e6a894-0d74-476f-aa09-9754fb813f38) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 5dcdf62d-ef69-407a-add7-02bab943137e / got description=51e6a894-0d74-476f-aa09-9754fb813f38) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 5dcdf62d-ef69-407a-add7-02bab943137e / got description=51e6a894-0d74-476f-aa09-9754fb813f38) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 5dcdf62d-ef69-407a-add7-02bab943137e / got description=51e6a894-0d74-476f-aa09-9754fb813f38) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 5dcdf62d-ef69-407a-add7-02bab943137e / got 
description=51e6a894-0d74-476f-aa09-9754fb813f38) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 5dcdf62d-ef69-407a-add7-02bab943137e / got description=51e6a894-0d74-476f-aa09-9754fb813f38) INFO  lib389.replica:replica.py:2500 FAIL: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 5dcdf62d-ef69-407a-add7-02bab943137e / got description=51e6a894-0d74-476f-aa09-9754fb813f38) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect d1081d45-aaf1-4198-beaa-3db955335b23 / got description=5dcdf62d-ef69-407a-add7-02bab943137e) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working
Passed suites/fourwaymmr/fourwaymmr_test.py::test_create_an_entry_on_the_supplier 2.31
No log output captured.
Passed suites/fourwaymmr/fourwaymmr_test.py::test_bob_acceptance_tests 6.40
-------------------------------Captured log call--------------------------------
INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 5dea6b76-fd2e-4630-ba44-f20993eedb6f / got description=d1081d45-aaf1-4198-beaa-3db955335b23) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working
Passed suites/fourwaymmr/fourwaymmr_test.py::test_replica_backup_and_restore 29.52
------------------------------Captured stderr call------------------------------
ldiffile: /tmp/output_file [28/Oct/2020:21:19:51.813002287 -0400] - INFO - slapd_exemode_ldif2db - Backend Instance: userRoot
-------------------------------Captured log call--------------------------------
INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 09d234fb-22ca-4f05-9b3c-43c1ff783532 / got description=5dea6b76-fd2e-4630-ba44-f20993eedb6f) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 9125cb7b-388c-4e98-9ef1-2e33a37432ad / got description=09d234fb-22ca-4f05-9b3c-43c1ff783532) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 439d0315-67c7-4c8c-8ba8-615a71250869 / got description=9125cb7b-388c-4e98-9ef1-2e33a37432ad) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect a77da8a9-24df-4d51-a7d4-c135edd05f86 / got description=439d0315-67c7-4c8c-8ba8-615a71250869) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to 
ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 093ddcc4-8aa9-4af1-b623-d4690b4f2617 / got description=a77da8a9-24df-4d51-a7d4-c135edd05f86) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 338e3104-b8ac-47bc-b025-53864b5b4f0d / got description=093ddcc4-8aa9-4af1-b623-d4690b4f2617) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working
Passed suites/fractional/fractional_test.py::test_fractional_agreements 3.11
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for consumer1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39201, 'ldap-secureport': 63901, 'server-id': 'consumer1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for consumer2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39202, 'ldap-secureport': 63902, 'server-id': 'consumer2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... 
INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 2eeeebfc-fc91-47b5-93d3-8bd0e36d69d3 / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect df3b4ace-7313-4ce3-a2ea-7731ac6e17b9 / got description=2eeeebfc-fc91-47b5-93d3-8bd0e36d69d3) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... 
INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists INFO  lib389.topologies:topologies.py:169 Joining consumer consumer1 from master1 ... INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 80f1473f-518a-42a9-a214-51cb873e1b3b / got description=df3b4ace-7313-4ce3-a2ea-7731ac6e17b9) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is working INFO  lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 INFO  lib389.topologies:topologies.py:169 Joining consumer consumer2 from master1 ... 
INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39202 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39202 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39202 is NOT working (expect 4ab68993-e0a3-4d7c-a03b-87564029efa2 / got description=80f1473f-518a-42a9-a214-51cb873e1b3b) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39202 is working INFO  lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39202 INFO  lib389.topologies:topologies.py:174 Ensuring consumer consumer1 from master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 already exists INFO  lib389.topologies:topologies.py:174 Ensuring consumer consumer2 from master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39202 already exists INFO  lib389.topologies:topologies.py:174 Ensuring consumer consumer1 from master2 ... 
INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is was created INFO  lib389.topologies:topologies.py:174 Ensuring consumer consumer2 from master2 ... INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39202 is was created
-------------------------------Captured log call--------------------------------
INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 7ea7d604-cd2e-4f79-b577-17aef5edaa41 / got description=4ab68993-e0a3-4d7c-a03b-87564029efa2) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 278428ba-1490-46e5-b511-99c65a3bf39f / got description=7ea7d604-cd2e-4f79-b577-17aef5edaa41) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39202 is NOT working (expect 8cc76b87-dda2-4e14-8767-901cdb896b2f / got description=278428ba-1490-46e5-b511-99c65a3bf39f) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39202 is working
Passed suites/fractional/fractional_test.py::test_read_only_consumer 0.08
No log output captured.
Passed suites/fractional/fractional_test.py::test_read_write_supplier 3.08
-------------------------------Captured log call--------------------------------
INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect d94e1d26-017c-4934-87b5-97513bed2361 / got description=8cc76b87-dda2-4e14-8767-901cdb896b2f) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 83b321b9-bad9-47df-9f32-16d50f4e94c3 / got description=d94e1d26-017c-4934-87b5-97513bed2361) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39202 is NOT working (expect d4e1ae84-27ec-4d51-8e96-75932c4963f5 / got description=83b321b9-bad9-47df-9f32-16d50f4e94c3) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39202 is working
Passed suites/fractional/fractional_test.py::test_filtered_attributes 3.07
-------------------------------Captured log call--------------------------------
INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0ce6a28a-e7ed-4402-830d-e0fec41d400a / got description=d4e1ae84-27ec-4d51-8e96-75932c4963f5) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect d49754c1-f6f7-477c-80c9-ab2e9733ea55 / got description=0ce6a28a-e7ed-4402-830d-e0fec41d400a) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39202 is NOT working (expect 2c8a85a6-775f-4780-aaca-d34d56027979 / got description=d49754c1-f6f7-477c-80c9-ab2e9733ea55) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39202 is working
Passed suites/fractional/fractional_test.py::test_fewer_changes_in_single_operation 9.28
-------------------------------Captured log call--------------------------------
INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 20c1171e-1fe3-470b-9458-61f667d3efa1 / got description=2c8a85a6-775f-4780-aaca-d34d56027979) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 88b02d7a-cc10-441c-8cb8-9a26571f81b8 / got description=20c1171e-1fe3-470b-9458-61f667d3efa1) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39202 is NOT working (expect c22057c5-41d8-49fa-bfce-e50a50b44a50 / got description=88b02d7a-cc10-441c-8cb8-9a26571f81b8) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39202 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect a4f582ef-de6c-4fa8-8c08-299f4f45f103 / got description=c22057c5-41d8-49fa-bfce-e50a50b44a50) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to 
ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 5e3edfb2-9696-4dca-9ea0-5310e74146d9 / got description=a4f582ef-de6c-4fa8-8c08-299f4f45f103) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39202 is NOT working (expect a94d1078-a686-4c60-83da-d84c23c5ff2c / got description=5e3edfb2-9696-4dca-9ea0-5310e74146d9) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39202 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 1c8ea601-8a05-421e-868a-01a8ea3faa77 / got description=a94d1078-a686-4c60-83da-d84c23c5ff2c) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 4538dc61-8f71-4a76-a171-e668f983b48b / got description=1c8ea601-8a05-421e-868a-01a8ea3faa77) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from 
ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39202 is NOT working (expect 71789b4c-cf4a-47e8-8299-bce99d9a3bd4 / got description=4538dc61-8f71-4a76-a171-e668f983b48b) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39202 is working
Passed suites/fractional/fractional_test.py::test_newly_added_attribute_nsds5replicatedattributelisttotal 4.07
-------------------------------Captured log call--------------------------------
INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect dfb917c7-8b17-4ea0-8964-1da2435118c1 / got description=71789b4c-cf4a-47e8-8299-bce99d9a3bd4) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect dfb917c7-8b17-4ea0-8964-1da2435118c1 / got description=71789b4c-cf4a-47e8-8299-bce99d9a3bd4) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 5cd182cd-e14b-4106-b14f-6cfb7ca24feb / got description=dfb917c7-8b17-4ea0-8964-1da2435118c1) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39202 is NOT working (expect 145a95c7-4efb-47da-b45d-14947f5beb3a / got description=5cd182cd-e14b-4106-b14f-6cfb7ca24feb) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39202 is working
Passed suites/fractional/fractional_test.py::test_attribute_nsds5replicatedattributelisttotal 21.65
-------------------------------Captured log call--------------------------------
INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 733b107f-086a-43e6-939b-fbb9c5d9831e / got description=145a95c7-4efb-47da-b45d-14947f5beb3a) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect adcc15a3-c305-4bab-aa75-db054ae2cb82 / got description=733b107f-086a-43e6-939b-fbb9c5d9831e) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39202 is NOT working (expect 551d2426-7516-4d14-94c3-ee5cc333892c / got description=adcc15a3-c305-4bab-aa75-db054ae2cb82) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39202 is working
Passed suites/fractional/fractional_test.py::test_implicit_replication_of_password_policy 12.28
-------------------------------Captured log call--------------------------------
INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect a64aaf7d-d5ef-419a-a7e1-5718f9b1b21b / got description=551d2426-7516-4d14-94c3-ee5cc333892c) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 4e155b0a-717f-4680-81d4-364fd17b236e / got description=a64aaf7d-d5ef-419a-a7e1-5718f9b1b21b) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39202 is NOT working (expect da0637b0-fa00-4b5e-a130-a836ccdb7aca / got description=4e155b0a-717f-4680-81d4-364fd17b236e) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39202 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 4f74092f-78b9-46d5-a999-9bf9f1fce837 / got description=da0637b0-fa00-4b5e-a130-a836ccdb7aca) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to 
ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 4f74092f-78b9-46d5-a999-9bf9f1fce837 / got description=da0637b0-fa00-4b5e-a130-a836ccdb7aca) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 4f74092f-78b9-46d5-a999-9bf9f1fce837 / got description=da0637b0-fa00-4b5e-a130-a836ccdb7aca) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 4f74092f-78b9-46d5-a999-9bf9f1fce837 / got description=da0637b0-fa00-4b5e-a130-a836ccdb7aca) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 8d519f89-4f13-49ab-971a-f3cbbdc798a1 / got description=4f74092f-78b9-46d5-a999-9bf9f1fce837) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39202 is NOT working (expect ef78628d-1a3f-4b3f-916e-5423753872e1 / got description=8d519f89-4f13-49ab-971a-f3cbbdc798a1) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to 
ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39202 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 2f4e9bec-1e15-4eff-872d-f397928150ca / got description=4f74092f-78b9-46d5-a999-9bf9f1fce837) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 934ea601-672d-439f-baa1-c2e8fb28f759 / got description=2f4e9bec-1e15-4eff-872d-f397928150ca) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39202 is NOT working (expect 605b0c8e-6251-4a20-af2a-1b7ad5d9c20c / got description=934ea601-672d-439f-baa1-c2e8fb28f759) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39202 is working
Passed suites/get_effective_rights/acceptance_test.py::test_group_aci_entry_exists 0.02
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. INFO  tests.suites.get_effective_rights.acceptance_test:acceptance_test.py:30 Adding user testuser
-------------------------------Captured log call--------------------------------
INFO  tests.suites.get_effective_rights.acceptance_test:acceptance_test.py:57 Adding group group1 INFO  tests.suites.get_effective_rights.acceptance_test:acceptance_test.py:70 Add an ACI granting add access to a user matching the groupdn INFO  lib389:acceptance_test.py:79 dn: uid=testuser,dc=example,dc=com INFO  lib389:acceptance_test.py:81 ######## entryLevelRights: b'vadn'
Passed suites/get_effective_rights/acceptance_test.py::test_group_aci_template_entry 0.01
-------------------------------Captured log call--------------------------------
INFO  tests.suites.get_effective_rights.acceptance_test:acceptance_test.py:105 Add an ACI granting add access to a user matching the userdn INFO  lib389:acceptance_test.py:115 dn: cn=template_person_objectclass,dc=example,dc=com INFO  lib389:acceptance_test.py:117 ######## entryLevelRights: b'vadn' INFO  lib389:acceptance_test.py:120 dn: cn=template_groupofnames_objectclass,dc=example,dc=com INFO  lib389:acceptance_test.py:122 ######## entryLevelRights: b'none'
Passed suites/gssapi/simple_gssapi_test.py::test_invalid_sasl_map 0.22
No log output captured.
Passed suites/gssapi/simple_gssapi_test.py::test_missing_user 1.02
------------------------------Captured stdout call------------------------------
Authenticating as principal testuser/admin@HOSTED.UPSHIFT.RDU2.REDHAT.COM with password. Principal "doesnotexist@HOSTED.UPSHIFT.RDU2.REDHAT.COM" created. Authenticating as principal testuser/admin@HOSTED.UPSHIFT.RDU2.REDHAT.COM with password. K/M@HOSTED.UPSHIFT.RDU2.REDHAT.COM doesnotexist@HOSTED.UPSHIFT.RDU2.REDHAT.COM kadmin/admin@HOSTED.UPSHIFT.RDU2.REDHAT.COM kadmin/changepw@HOSTED.UPSHIFT.RDU2.REDHAT.COM kadmin/ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com@HOSTED.UPSHIFT.RDU2.REDHAT.COM kiprop/ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com@HOSTED.UPSHIFT.RDU2.REDHAT.COM krbtgt/HOSTED.UPSHIFT.RDU2.REDHAT.COM@HOSTED.UPSHIFT.RDU2.REDHAT.COM ldap/ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com@HOSTED.UPSHIFT.RDU2.REDHAT.COM testuser@HOSTED.UPSHIFT.RDU2.REDHAT.COM Authenticating as principal testuser/admin@HOSTED.UPSHIFT.RDU2.REDHAT.COM with password. Entry for principal doesnotexist@HOSTED.UPSHIFT.RDU2.REDHAT.COM with kvno 2, encryption type aes256-cts-hmac-sha1-96 added to keytab WRFILE:/tmp/doesnotexist.keytab. Entry for principal doesnotexist@HOSTED.UPSHIFT.RDU2.REDHAT.COM with kvno 2, encryption type aes128-cts-hmac-sha1-96 added to keytab WRFILE:/tmp/doesnotexist.keytab.
------------------------------Captured stderr call------------------------------
No policy specified for doesnotexist@HOSTED.UPSHIFT.RDU2.REDHAT.COM; defaulting to no policy
Passed suites/gssapi/simple_gssapi_test.py::test_rejected_mech 0.23
No log output captured.
Passed suites/gssapi_repl/gssapi_repl_test.py::test_gssapi_repl 0.00
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 221b3f07-30c9-4943-bdb0-724dc87459b4 / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working 
(expect 24455912-e698-4008-b7bd-6f198d2cd2df / got description=221b3f07-30c9-4943-bdb0-724dc87459b4) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists
Passed suites/healthcheck/health_config_test.py::test_healthcheck_logging_format_should_be_revised 0.45
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking config:hr_timestamp ... INFO  LogCapture:health.py:99 Checking config:passwordscheme ... INFO  LogCapture:health.py:99 Checking refint:attr_indexes ... INFO  LogCapture:health.py:99 Checking refint:update_delay ... INFO  LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO  LogCapture:health.py:99 Checking backends:userroot:mappingtree ... INFO  LogCapture:health.py:99 Checking backends:userroot:search ... INFO  LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... INFO  LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... INFO  LogCapture:health.py:99 Checking logs:notes ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:119 1 Issue found! Generating report ... INFO  LogCapture:health.py:45 [1] DS Lint Error: DSCLE0001 INFO  LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO  LogCapture:health.py:47 Severity: LOW INFO  LogCapture:health.py:49 Check: config:hr_timestamp INFO  LogCapture:health.py:50 Affects: INFO  LogCapture:health.py:52 -- cn=config INFO  LogCapture:health.py:53 Details: INFO  LogCapture:health.py:54 ----------- INFO  LogCapture:health.py:55 nsslapd-logging-hr-timestamps-enabled changes the log format in directory server from [07/Jun/2017:17:15:58 +1000] to [07/Jun/2017:17:15:58.716117312 +1000] This actually provides a performance improvement. Additionally, this setting will be removed in a future release. INFO  LogCapture:health.py:56 Resolution: INFO  LogCapture:health.py:57 ----------- INFO  LogCapture:health.py:58 Set nsslapd-logging-hr-timestamps-enabled to on. You can use 'dsconf' to set this attribute. 
Here is an example: # dsconf slapd-standalone1 config replace nsslapd-logging-hr-timestamps-enabled=on INFO  LogCapture:health.py:124 ===== End Of Report (1 Issue found) ===== INFO  LogCapture:health.py:126 [ { "dsle": "DSCLE0001", "severity": "LOW", "description": "Different log timestamp format.", "items": [ "cn=config" ], "detail": "nsslapd-logging-hr-timestamps-enabled changes the log format in directory server from\n\n[07/Jun/2017:17:15:58 +1000]\n\nto\n\n[07/Jun/2017:17:15:58.716117312 +1000]\n\nThis actually provides a performance improvement. Additionally, this setting will be\nremoved in a future release.\n", "fix": "Set nsslapd-logging-hr-timestamps-enabled to on.\nYou can use 'dsconf' to set this attribute. Here is an example:\n\n # dsconf slapd-standalone1 config replace nsslapd-logging-hr-timestamps-enabled=on", "check": "config:hr_timestamp" } ] INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking config:hr_timestamp ... INFO  LogCapture:health.py:99 Checking config:passwordscheme ... INFO  LogCapture:health.py:99 Checking refint:attr_indexes ... INFO  LogCapture:health.py:99 Checking refint:update_delay ... INFO  LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO  LogCapture:health.py:99 Checking backends:userroot:mappingtree ... INFO  LogCapture:health.py:99 Checking backends:userroot:search ... INFO  LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... INFO  LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... INFO  LogCapture:health.py:99 Checking logs:notes ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:111 No issues found. INFO  LogCapture:health.py:113 []
Passed suites/healthcheck/health_config_test.py::test_healthcheck_RI_plugin_is_misconfigured 0.47
-------------------------------Captured log call--------------------------------
INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking config:hr_timestamp ... INFO  LogCapture:health.py:99 Checking config:passwordscheme ... INFO  LogCapture:health.py:99 Checking refint:attr_indexes ... INFO  LogCapture:health.py:99 Checking refint:update_delay ... INFO  LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO  LogCapture:health.py:99 Checking backends:userroot:mappingtree ... INFO  LogCapture:health.py:99 Checking backends:userroot:search ... INFO  LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... INFO  LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... INFO  LogCapture:health.py:99 Checking logs:notes ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:119 1 Issue found! Generating report ... INFO  LogCapture:health.py:45 [1] DS Lint Error: DSRILE0001 INFO  LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO  LogCapture:health.py:47 Severity: LOW INFO  LogCapture:health.py:49 Check: refint:update_delay INFO  LogCapture:health.py:50 Affects: INFO  LogCapture:health.py:52 -- cn=referential integrity postoperation,cn=plugins,cn=config INFO  LogCapture:health.py:53 Details: INFO  LogCapture:health.py:54 ----------- INFO  LogCapture:health.py:55 The referential integrity plugin has an asynchronous processing mode. This is controlled by the update-delay flag. When this value is 0, referential integrity plugin processes these changes inside of the operation that modified the entry - ie these are synchronous. However, when this is > 0, these are performed asynchronously. This leads to only having referint enabled on one master in MMR to prevent replication conflicts and loops. 
Additionally, because these are performed in the background these updates may cause spurious update delays to your server by batching changes rather than smaller updates during sync processing. We advise that you set this value to 0, and enable referint on all masters as it provides a more predictable behaviour. INFO  LogCapture:health.py:56 Resolution: INFO  LogCapture:health.py:57 ----------- INFO  LogCapture:health.py:58 Set referint-update-delay to 0. You can use 'dsconf' to set this value. Here is an example: # dsconf slapd-standalone1 plugin referential-integrity set --update-delay 0 You must restart the Directory Server for this change to take effect. INFO  LogCapture:health.py:124 ===== End Of Report (1 Issue found) ===== INFO  LogCapture:health.py:126 [ { "dsle": "DSRILE0001", "severity": "LOW", "description": "Referential integrity plugin may be slower.", "items": [ "cn=referential integrity postoperation,cn=plugins,cn=config" ], "detail": "The referential integrity plugin has an asynchronous processing mode.\nThis is controlled by the update-delay flag. When this value is 0, referential\nintegrity plugin processes these changes inside of the operation that modified\nthe entry - ie these are synchronous.\n\nHowever, when this is > 0, these are performed asynchronously.\n\nThis leads to only having referint enabled on one master in MMR to prevent replication conflicts and loops.\nAdditionally, because these are performed in the background these updates may cause spurious update\ndelays to your server by batching changes rather than smaller updates during sync processing.\n\nWe advise that you set this value to 0, and enable referint on all masters as it provides a more predictable behaviour.\n", "fix": "Set referint-update-delay to 0.\n\nYou can use 'dsconf' to set this value. 
Here is an example:\n\n # dsconf slapd-standalone1 plugin referential-integrity set --update-delay 0\n\nYou must restart the Directory Server for this change to take effect.", "check": "refint:update_delay" } ] INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking config:hr_timestamp ... INFO  LogCapture:health.py:99 Checking config:passwordscheme ... INFO  LogCapture:health.py:99 Checking refint:attr_indexes ... INFO  LogCapture:health.py:99 Checking refint:update_delay ... INFO  LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO  LogCapture:health.py:99 Checking backends:userroot:mappingtree ... INFO  LogCapture:health.py:99 Checking backends:userroot:search ... INFO  LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... INFO  LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... INFO  LogCapture:health.py:99 Checking logs:notes ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:111 No issues found. INFO  LogCapture:health.py:113 []
Passed suites/healthcheck/health_config_test.py::test_healthcheck_RI_plugin_missing_indexes 0.96
-------------------------------Captured log call--------------------------------
INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking config:hr_timestamp ... INFO  LogCapture:health.py:99 Checking config:passwordscheme ... INFO  LogCapture:health.py:99 Checking refint:attr_indexes ... INFO  LogCapture:health.py:99 Checking refint:update_delay ... INFO  LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO  LogCapture:health.py:99 Checking backends:userroot:mappingtree ... INFO  LogCapture:health.py:99 Checking backends:userroot:search ... INFO  LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... INFO  LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... INFO  LogCapture:health.py:99 Checking logs:notes ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:119 1 Issue found! Generating report ... INFO  LogCapture:health.py:45 [1] DS Lint Error: DSRILE0002 INFO  LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO  LogCapture:health.py:47 Severity: HIGH INFO  LogCapture:health.py:49 Check: refint:attr_indexes INFO  LogCapture:health.py:50 Affects: INFO  LogCapture:health.py:52 -- cn=referential integrity postoperation,cn=plugins,cn=config INFO  LogCapture:health.py:52 -- dc=example,dc=com INFO  LogCapture:health.py:52 -- member INFO  LogCapture:health.py:53 Details: INFO  LogCapture:health.py:54 ----------- INFO  LogCapture:health.py:55 The referential integrity plugin is configured to use an attribute (member) that does not have an "equality" index in backend (dc=example,dc=com). Failure to have the proper indexing will lead to unindexed searches which cause high CPU and can significantly slow the server down. 
INFO  LogCapture:health.py:56 Resolution: INFO  LogCapture:health.py:57 ----------- INFO  LogCapture:health.py:58 Check the attributes set in "referint-membership-attr" to make sure they have an index defined that has at least the equality "eq" index type. You will need to reindex the database after adding the missing index type. Here is an example using dsconf: # dsconf slapd-standalone1 backend index add --attr=member --reindex --index-type=eq dc=example,dc=com INFO  LogCapture:health.py:124 ===== End Of Report (1 Issue found) ===== INFO  LogCapture:health.py:126 [ { "dsle": "DSRILE0002", "severity": "HIGH", "description": "Referential integrity plugin configured with unindexed attribute.", "items": [ "cn=referential integrity postoperation,cn=plugins,cn=config", "dc=example,dc=com", "member" ], "detail": "The referential integrity plugin is configured to use an attribute (member)\nthat does not have an \"equality\" index in backend (dc=example,dc=com).\nFailure to have the proper indexing will lead to unindexed searches which\ncause high CPU and can significantly slow the server down.", "fix": "Check the attributes set in \"referint-membership-attr\" to make sure they have\nan index defined that has at least the equality \"eq\" index type. You will\nneed to reindex the database after adding the missing index type. Here is an\nexample using dsconf:\n\n # dsconf slapd-standalone1 backend index add --attr=member --reindex --index-type=eq dc=example,dc=com\n", "check": "refint:attr_indexes" } ] INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking config:hr_timestamp ... INFO  LogCapture:health.py:99 Checking config:passwordscheme ... INFO  LogCapture:health.py:99 Checking refint:attr_indexes ... INFO  LogCapture:health.py:99 Checking refint:update_delay ... INFO  LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO  LogCapture:health.py:99 Checking backends:userroot:mappingtree ... 
INFO  LogCapture:health.py:99 Checking backends:userroot:search ... INFO  LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... INFO  LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... INFO  LogCapture:health.py:99 Checking logs:notes ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:111 No issues found. INFO  LogCapture:health.py:113 []
Passed suites/healthcheck/health_config_test.py::test_healthcheck_virtual_attr_incorrectly_indexed 0.13
-------------------------------Captured log call--------------------------------
INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking config:hr_timestamp ... INFO  LogCapture:health.py:99 Checking config:passwordscheme ... INFO  LogCapture:health.py:99 Checking refint:attr_indexes ... INFO  LogCapture:health.py:99 Checking refint:update_delay ... INFO  LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO  LogCapture:health.py:99 Checking backends:userroot:mappingtree ... INFO  LogCapture:health.py:99 Checking backends:userroot:search ... INFO  LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... INFO  LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... INFO  LogCapture:health.py:99 Checking logs:notes ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:119 1 Issue found! Generating report ... INFO  LogCapture:health.py:45 [1] DS Lint Error: DSVIRTLE0001 INFO  LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO  LogCapture:health.py:47 Severity: HIGH INFO  LogCapture:health.py:49 Check: backends:userroot:virt_attrs INFO  LogCapture:health.py:50 Affects: INFO  LogCapture:health.py:52 -- Virtual Attributes INFO  LogCapture:health.py:52 -- dc=example,dc=com INFO  LogCapture:health.py:52 -- Class Of Service (COS) INFO  LogCapture:health.py:52 -- cosAttribute: postalcode INFO  LogCapture:health.py:53 Details: INFO  LogCapture:health.py:54 ----------- INFO  LogCapture:health.py:55 You should not index virtual attributes, and as this will break searches that use the attribute in a filter. INFO  LogCapture:health.py:56 Resolution: INFO  LogCapture:health.py:57 ----------- INFO  LogCapture:health.py:58 Remove the index for this attribute from the backend configuration. 
Here is an example using 'dsconf' to remove an index: # dsconf slapd-standalone1 backend index delete --attr postalcode dc=example,dc=com INFO  LogCapture:health.py:124 ===== End Of Report (1 Issue found) ===== INFO  LogCapture:health.py:126 [ { "dsle": "DSVIRTLE0001", "severity": "HIGH", "description": "Virtual attribute indexed.", "items": [ "Virtual Attributes", "dc=example,dc=com", "Class Of Service (COS)", "cosAttribute: postalcode" ], "detail": "You should not index virtual attributes, and as this will break searches that\nuse the attribute in a filter.", "fix": "Remove the index for this attribute from the backend configuration.\nHere is an example using 'dsconf' to remove an index:\n\n # dsconf slapd-standalone1 backend index delete --attr postalcode dc=example,dc=com", "check": "backends:userroot:virt_attrs" } ]
Passed suites/healthcheck/health_config_test.py::test_healthcheck_low_disk_space 0.15
-------------------------------Captured log call--------------------------------
INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking config:hr_timestamp ... INFO  LogCapture:health.py:99 Checking config:passwordscheme ... INFO  LogCapture:health.py:99 Checking refint:attr_indexes ... INFO  LogCapture:health.py:99 Checking refint:update_delay ... INFO  LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO  LogCapture:health.py:99 Checking backends:userroot:mappingtree ... INFO  LogCapture:health.py:99 Checking backends:userroot:search ... INFO  LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... INFO  LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... INFO  LogCapture:health.py:99 Checking logs:notes ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:119 2 Issues found! Generating report ... INFO  LogCapture:health.py:45 [1] DS Lint Error: DSVIRTLE0001 INFO  LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO  LogCapture:health.py:47 Severity: HIGH INFO  LogCapture:health.py:49 Check: backends:userroot:virt_attrs INFO  LogCapture:health.py:50 Affects: INFO  LogCapture:health.py:52 -- Virtual Attributes INFO  LogCapture:health.py:52 -- dc=example,dc=com INFO  LogCapture:health.py:52 -- Class Of Service (COS) INFO  LogCapture:health.py:52 -- cosAttribute: postalcode INFO  LogCapture:health.py:53 Details: INFO  LogCapture:health.py:54 ----------- INFO  LogCapture:health.py:55 You should not index virtual attributes, and as this will break searches that use the attribute in a filter. INFO  LogCapture:health.py:56 Resolution: INFO  LogCapture:health.py:57 ----------- INFO  LogCapture:health.py:58 Remove the index for this attribute from the backend configuration. 
Here is an example using 'dsconf' to remove an index: # dsconf slapd-standalone1 backend index delete --attr postalcode dc=example,dc=com INFO  LogCapture:health.py:45 [2] DS Lint Error: DSDSLE0001 INFO  LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO  LogCapture:health.py:47 Severity: HIGH INFO  LogCapture:health.py:49 Check: monitor-disk-space:disk_space INFO  LogCapture:health.py:50 Affects: INFO  LogCapture:health.py:52 -- Server INFO  LogCapture:health.py:52 -- cn=config INFO  LogCapture:health.py:53 Details: INFO  LogCapture:health.py:54 ----------- INFO  LogCapture:health.py:55 The disk partition used by the server (/), either for the database, the configuration files, or the logs is over 90% full. If the partition becomes completely filled serious problems can occur with the database or the server's stability. INFO  LogCapture:health.py:56 Resolution: INFO  LogCapture:health.py:57 ----------- INFO  LogCapture:health.py:58 Attempt to free up disk space. Also try removing old rotated logs, or disable any verbose logging levels that might have been set. You might consider enabling the "Disk Monitoring" feature in cn=config to help prevent a disorderly shutdown of the server: nsslapd-disk-monitoring: on You can use 'dsconf' to set this value. Here is an example: # dsconf slapd-standalone1 config replace nsslapd-disk-monitoring=on You must restart the Directory Server for this change to take effect. 
Please see the Administration guide for more information: https://access.redhat.com/documentation/en-us/red_hat_directory_server/10/html/administration_guide/diskmonitoring INFO  LogCapture:health.py:124 ===== End Of Report (2 Issues found) ===== INFO  LogCapture:health.py:126 [ { "dsle": "DSVIRTLE0001", "severity": "HIGH", "description": "Virtual attribute indexed.", "items": [ "Virtual Attributes", "dc=example,dc=com", "Class Of Service (COS)", "cosAttribute: postalcode" ], "detail": "You should not index virtual attributes, and as this will break searches that\nuse the attribute in a filter.", "fix": "Remove the index for this attribute from the backend configuration.\nHere is an example using 'dsconf' to remove an index:\n\n # dsconf slapd-standalone1 backend index delete --attr postalcode dc=example,dc=com", "check": "backends:userroot:virt_attrs" }, { "dsle": "DSDSLE0001", "severity": "HIGH", "description": "Low disk space.", "items": [ "Server", "cn=config" ], "detail": "The disk partition used by the server (/), either for the database, the\nconfiguration files, or the logs is over 90% full. If the partition becomes\ncompletely filled serious problems can occur with the database or the server's\nstability.", "fix": "Attempt to free up disk space. Also try removing old rotated logs, or disable any\nverbose logging levels that might have been set. You might consider enabling\nthe \"Disk Monitoring\" feature in cn=config to help prevent a disorderly shutdown\nof the server:\n\n nsslapd-disk-monitoring: on\n\nYou can use 'dsconf' to set this value. Here is an example:\n\n # dsconf slapd-standalone1 config replace nsslapd-disk-monitoring=on\n\nYou must restart the Directory Server for this change to take effect.\n\nPlease see the Administration guide for more information:\n\n https://access.redhat.com/documentation/en-us/red_hat_directory_server/10/html/administration_guide/diskmonitoring\n", "check": "monitor-disk-space:disk_space" } ]
Passed suites/healthcheck/health_config_test.py::test_healthcheck_notes_unindexed_search 11.71
-------------------------------Captured log call--------------------------------
INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking config:hr_timestamp ... INFO  LogCapture:health.py:99 Checking config:passwordscheme ... INFO  LogCapture:health.py:99 Checking refint:attr_indexes ... INFO  LogCapture:health.py:99 Checking refint:update_delay ... INFO  LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO  LogCapture:health.py:99 Checking backends:userroot:mappingtree ... INFO  LogCapture:health.py:99 Checking backends:userroot:search ... INFO  LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... INFO  LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... INFO  LogCapture:health.py:99 Checking logs:notes ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:119 1 Issue found! Generating report ... INFO  LogCapture:health.py:45 [1] DS Lint Error: DSLOGNOTES0001 INFO  LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO  LogCapture:health.py:47 Severity: Medium INFO  LogCapture:health.py:49 Check: logs:notes INFO  LogCapture:health.py:50 Affects: INFO  LogCapture:health.py:52 -- Performance INFO  LogCapture:health.py:52 -- /var/log/dirsrv/slapd-standalone1/access INFO  LogCapture:health.py:53 Details: INFO  LogCapture:health.py:54 ----------- INFO  LogCapture:health.py:55 Found 1 fully unindexed searches in the current access log. Unindexed searches can cause high CPU and slow down the entire server's performance. [1] Unindexed Search - date: . 
- conn/op: 1/2 - base: dc=example,dc=com - scope: 2 - filter: (&(|(objectClass=nsAccount)(objectClass=nsPerson)(objectClass=simpleSecurityObject)(objectClass=organization)(objectClass=person)(objectClass=account)(objectClass=organizationalUnit)(objectClass=netscapeServer)(objectClass=domain)(objectClass=posixAccount)(objectClass=shadowAccount)(objectClass=posixGroup)(objectClass=mailRecipient))(uid=test*)) - etime: 0.354278529 INFO  LogCapture:health.py:56 Resolution: INFO  LogCapture:health.py:57 ----------- INFO  LogCapture:health.py:58 Examine the searches that are unindexed, and either properly index the attributes in the filter, increase the nsslapd-idlistscanlimit, or stop using that filter. INFO  LogCapture:health.py:124 ===== End Of Report (1 Issue found) ===== INFO  LogCapture:health.py:126 [ { "dsle": "DSLOGNOTES0001", "severity": "Medium", "description": "Unindexed Search", "items": [ "Performance", "/var/log/dirsrv/slapd-standalone1/access" ], "detail": "Found 1 fully unindexed searches in the current access log.\nUnindexed searches can cause high CPU and slow down the entire server's performance.\n\n [1] Unindexed Search\n - date: .\n - conn/op: 1/2\n - base: dc=example,dc=com\n - scope: 2\n - filter: (&(|(objectClass=nsAccount)(objectClass=nsPerson)(objectClass=simpleSecurityObject)(objectClass=organization)(objectClass=person)(objectClass=account)(objectClass=organizationalUnit)(objectClass=netscapeServer)(objectClass=domain)(objectClass=posixAccount)(objectClass=shadowAccount)(objectClass=posixGroup)(objectClass=mailRecipient))(uid=test*))\n - etime: 0.354278529\n", "fix": "Examine the searches that are unindexed, and either properly index the attributes\nin the filter, increase the nsslapd-idlistscanlimit, or stop using that filter.", "check": "logs:notes" } ]
Passed suites/healthcheck/health_config_test.py::test_healthcheck_notes_unknown_attribute 11.26
-------------------------------Captured log call--------------------------------
INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking config:hr_timestamp ... INFO  LogCapture:health.py:99 Checking config:passwordscheme ... INFO  LogCapture:health.py:99 Checking refint:attr_indexes ... INFO  LogCapture:health.py:99 Checking refint:update_delay ... INFO  LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO  LogCapture:health.py:99 Checking backends:userroot:mappingtree ... INFO  LogCapture:health.py:99 Checking backends:userroot:search ... INFO  LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... INFO  LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... INFO  LogCapture:health.py:99 Checking logs:notes ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:119 1 Issue found! Generating report ... INFO  LogCapture:health.py:45 [1] DS Lint Error: DSLOGNOTES0002 INFO  LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO  LogCapture:health.py:47 Severity: Medium INFO  LogCapture:health.py:49 Check: logs:notes INFO  LogCapture:health.py:50 Affects: INFO  LogCapture:health.py:52 -- Possible Performance Impact INFO  LogCapture:health.py:52 -- /var/log/dirsrv/slapd-standalone1/access INFO  LogCapture:health.py:53 Details: INFO  LogCapture:health.py:54 ----------- INFO  LogCapture:health.py:55 Found 1 searches in the current access log that are using an unknown attribute in the search filter. [1] Invalid Attribute in Filter - date: . 
- conn/op: 1/2 - filter: (&(|(objectClass=nsAccount)(objectClass=nsPerson)(objectClass=simpleSecurityObject)(objectClass=organization)(objectClass=person)(objectClass=account)(objectClass=organizationalUnit)(objectClass=netscapeServer)(objectClass=domain)(objectClass=posixAccount)(objectClass=shadowAccount)(objectClass=posixGroup)(objectClass=mailRecipient))(unknown=test)) INFO  LogCapture:health.py:56 Resolution: INFO  LogCapture:health.py:57 ----------- INFO  LogCapture:health.py:58 Stop using this these unknown attributes in the filter, or add the schema to the server and make sure it's properly indexed. INFO  LogCapture:health.py:124 ===== End Of Report (1 Issue found) ===== INFO  LogCapture:health.py:126 [ { "dsle": "DSLOGNOTES0002", "severity": "Medium", "description": "Unknown Attribute In Filter", "items": [ "Possible Performance Impact", "/var/log/dirsrv/slapd-standalone1/access" ], "detail": "Found 1 searches in the current access log that are using an\nunknown attribute in the search filter.\n\n [1] Invalid Attribute in Filter\n - date: .\n - conn/op: 1/2\n - filter: (&(|(objectClass=nsAccount)(objectClass=nsPerson)(objectClass=simpleSecurityObject)(objectClass=organization)(objectClass=person)(objectClass=account)(objectClass=organizationalUnit)(objectClass=netscapeServer)(objectClass=domain)(objectClass=posixAccount)(objectClass=shadowAccount)(objectClass=posixGroup)(objectClass=mailRecipient))(unknown=test))\n", "fix": "Stop using this these unknown attributes in the filter, or add the schema\nto the server and make sure it's properly indexed.", "check": "logs:notes" } ]
Passed suites/healthcheck/health_repl_test.py::test_healthcheck_replication_replica_not_reachable 2.20
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 7a616ca7-8dcf-4cbc-b5d1-2e0eb63891df / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working 
(expect 961f1dcc-1794-4be2-9203-87aff15178c5 / got description=7a616ca7-8dcf-4cbc-b5d1-2e0eb63891df) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists
-------------------------------Captured log call--------------------------------
INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect f89a9125-f581-4b39-b9c4-a45c2f74a74f / got description=961f1dcc-1794-4be2-9203-87aff15178c5) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking replication:agmts_status ... INFO  LogCapture:health.py:99 Checking replication:conflicts ... INFO  LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:119 1 Issue found! Generating report ... INFO  LogCapture:health.py:45 [1] DS Lint Error: DSREPLLE0005 INFO  LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO  LogCapture:health.py:47 Severity: MEDIUM INFO  LogCapture:health.py:49 Check: replication:agmts_status INFO  LogCapture:health.py:50 Affects: INFO  LogCapture:health.py:52 -- Replication INFO  LogCapture:health.py:52 -- Agreement INFO  LogCapture:health.py:53 Details: INFO  LogCapture:health.py:54 ----------- INFO  LogCapture:health.py:55 The replication agreement (002) under "dc=example,dc=com" is not in synchronization, because the consumer server is not reachable. INFO  LogCapture:health.py:56 Resolution: INFO  LogCapture:health.py:57 ----------- INFO  LogCapture:health.py:58 Check if the consumer is running, and also check the errors log for more information. 
INFO  LogCapture:health.py:124 ===== End Of Report (1 Issue found) ===== INFO  LogCapture:health.py:126 [ { "dsle": "DSREPLLE0005", "severity": "MEDIUM", "description": "Replication consumer not reachable.", "items": [ "Replication", "Agreement" ], "detail": "The replication agreement (002) under \"dc=example,dc=com\" is not in synchronization,\nbecause the consumer server is not reachable.", "fix": "Check if the consumer is running, and also check the errors log for more information.", "check": "replication:agmts_status" } ] INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect f3d0732e-02c7-444b-9748-93994c19dced / got description=f89a9125-f581-4b39-b9c4-a45c2f74a74f) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking replication:agmts_status ... INFO  LogCapture:health.py:99 Checking replication:conflicts ... INFO  LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:111 No issues found. INFO  LogCapture:health.py:113 []
Passed suites/healthcheck/health_repl_test.py::test_healthcheck_changelog_trimming_not_configured 3.18
-------------------------------Captured log call--------------------------------
INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking replication:agmts_status ... INFO  LogCapture:health.py:99 Checking replication:conflicts ... INFO  LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:119 1 Issue found! Generating report ... INFO  LogCapture:health.py:45 [1] DS Lint Error: DSCLLE0001 INFO  LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO  LogCapture:health.py:47 Severity: LOW INFO  LogCapture:health.py:49 Check: backends:userroot::cl_trimming INFO  LogCapture:health.py:50 Affects: INFO  LogCapture:health.py:52 -- Replication INFO  LogCapture:health.py:52 -- Changelog INFO  LogCapture:health.py:52 -- Backends INFO  LogCapture:health.py:53 Details: INFO  LogCapture:health.py:54 ----------- INFO  LogCapture:health.py:55 The replication changelog does have any kind of trimming configured. This will lead to the changelog size growing indefinitely. INFO  LogCapture:health.py:56 Resolution: INFO  LogCapture:health.py:57 ----------- INFO  LogCapture:health.py:58 Configure changelog trimming, preferably by setting the maximum age of a changelog record. Here is an example: # dsconf slapd-master1 replication set-changelog --suffix YOUR_SUFFIX --max-age 30d INFO  LogCapture:health.py:124 ===== End Of Report (1 Issue found) ===== INFO  LogCapture:health.py:126 [ { "dsle": "DSCLLE0001", "severity": "LOW", "description": "Changelog trimming not configured.", "items": [ "Replication", "Changelog", "Backends" ], "detail": "The replication changelog does have any kind of trimming configured. This will\nlead to the changelog size growing indefinitely.", "fix": "Configure changelog trimming, preferably by setting the maximum age of a changelog\nrecord. 
Here is an example:\n\n # dsconf slapd-master1 replication set-changelog --suffix YOUR_SUFFIX --max-age 30d", "check": "backends:userroot::cl_trimming" } ] INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking replication:agmts_status ... INFO  LogCapture:health.py:99 Checking replication:conflicts ... INFO  LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:111 No issues found. INFO  LogCapture:health.py:113 []
Passed suites/healthcheck/health_repl_test.py::test_healthcheck_replication_presence_of_conflict_entries 3.44
-------------------------------Captured log call--------------------------------
INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect d132f992-7cd9-4444-99bf-29b44672915d / got description=f3d0732e-02c7-444b-9748-93994c19dced) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 847a91e4-b72e-4e5c-8a18-a7f50796f4f6 / got description=d132f992-7cd9-4444-99bf-29b44672915d) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 9410494b-2cf4-446c-b616-1ce2d7de959f / got description=847a91e4-b72e-4e5c-8a18-a7f50796f4f6) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking replication:agmts_status ... INFO  LogCapture:health.py:99 Checking replication:conflicts ... INFO  LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:119 1 Issue found! Generating report ... 
INFO  LogCapture:health.py:45 [1] DS Lint Error: DSREPLLE0002 INFO  LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO  LogCapture:health.py:47 Severity: LOW INFO  LogCapture:health.py:49 Check: replication:conflicts INFO  LogCapture:health.py:50 Affects: INFO  LogCapture:health.py:52 -- Replication INFO  LogCapture:health.py:52 -- Conflict Entries INFO  LogCapture:health.py:53 Details: INFO  LogCapture:health.py:54 ----------- INFO  LogCapture:health.py:55 There were 1 conflict entries found under the replication suffix "dc=example,dc=com". INFO  LogCapture:health.py:56 Resolution: INFO  LogCapture:health.py:57 ----------- INFO  LogCapture:health.py:58 While conflict entries are expected to occur in an MMR environment, they should be resolved. In regards to conflict entries there is always the original/counterpart entry that has a normal DN, and then the conflict version of that entry. Technically both entries are valid, you as the administrator, needs to decide which entry you want to keep. First examine/compare both entries to determine which one you want to keep or remove. You can use the CLI tool "dsconf" to resolve the conflict. 
Here is an example: List the conflict entries: # dsconf slapd-master1 repl-conflict list dc=example,dc=com Examine conflict entry and its counterpart entry: # dsconf slapd-master1 repl-conflict compare <DN of conflict entry> Remove conflict entry and keep only the original/counterpart entry: # dsconf slapd-master1 repl-conflict delete <DN of conflict entry> Replace the original/counterpart entry with the conflict entry: # dsconf slapd-master1 repl-conflict swap <DN of conflict entry> INFO  LogCapture:health.py:124 ===== End Of Report (1 Issue found) ===== INFO  LogCapture:health.py:126 [ { "dsle": "DSREPLLE0002", "severity": "LOW", "description": "Replication conflict entries found.", "items": [ "Replication", "Conflict Entries" ], "detail": "There were 1 conflict entries found under the replication suffix \"dc=example,dc=com\".", "fix": "While conflict entries are expected to occur in an MMR environment, they\nshould be resolved. In regards to conflict entries there is always the original/counterpart\nentry that has a normal DN, and then the conflict version of that entry. Technically both\nentries are valid, you as the administrator, needs to decide which entry you want to keep.\nFirst examine/compare both entries to determine which one you want to keep or remove. You\ncan use the CLI tool \"dsconf\" to resolve the conflict. Here is an example:\n\n List the conflict entries:\n\n # dsconf slapd-master1 repl-conflict list dc=example,dc=com\n\n Examine conflict entry and its counterpart entry:\n\n # dsconf slapd-master1 repl-conflict compare <DN of conflict entry>\n\n Remove conflict entry and keep only the original/counterpart entry:\n\n # dsconf slapd-master1 repl-conflict delete <DN of conflict entry>\n\n Replace the original/counterpart entry with the conflict entry:\n\n # dsconf slapd-master1 repl-conflict swap <DN of conflict entry>\n", "check": "replication:conflicts" } ]
Passed suites/healthcheck/health_repl_test.py::test_healthcheck_replication_out_of_sync_broken 0.68
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master3 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39003, 'ldap-secureport': 63703, 'server-id': 'master3', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... 
INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 39adba5e-15e6-48b2-b8b5-23e47de8dab3 / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 9df74bb9-bd7e-4d8a-8105-53cd886d48a7 / got description=39adba5e-15e6-48b2-b8b5-23e47de8dab3) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:156 Joining master master3 to master1 ... 
INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 62de050a-8e20-41a9-b10c-1a01e768c142 / got description=9df74bb9-bd7e-4d8a-8105-53cd886d48a7) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 3dd34bb1-7f1f-4f07-b76f-22eaa90808d1 / got description=62de050a-8e20-41a9-b10c-1a01e768c142) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... 
INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master3 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master3 ... INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is was created INFO  lib389.topologies:topologies.py:164 Ensuring master master3 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master3 to master2 ... INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created
-------------------------------Captured log call--------------------------------
INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking replication:agmts_status ... INFO  LogCapture:health.py:99 Checking replication:conflicts ... INFO  LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:119 3 Issues found! Generating report ... INFO  LogCapture:health.py:45 [1] DS Lint Error: DSREPLLE0001 INFO  LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO  LogCapture:health.py:47 Severity: HIGH INFO  LogCapture:health.py:49 Check: replication:agmts_status INFO  LogCapture:health.py:50 Affects: INFO  LogCapture:health.py:52 -- Replication INFO  LogCapture:health.py:52 -- Agreement INFO  LogCapture:health.py:53 Details: INFO  LogCapture:health.py:54 ----------- INFO  LogCapture:health.py:55 The replication agreement (002) under "dc=example,dc=com" is not in synchronization. INFO  LogCapture:health.py:56 Resolution: INFO  LogCapture:health.py:57 ----------- INFO  LogCapture:health.py:58 You may need to reinitialize this replication agreement. Please check the errors log for more information. If you do need to reinitialize the agreement you can do so using dsconf. Here is an example: # dsconf slapd-master1 repl-agmt init "002" --suffix dc=example,dc=com INFO  LogCapture:health.py:45 [2] DS Lint Error: DSREPLLE0001 INFO  LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO  LogCapture:health.py:47 Severity: HIGH INFO  LogCapture:health.py:49 Check: replication:agmts_status INFO  LogCapture:health.py:50 Affects: INFO  LogCapture:health.py:52 -- Replication INFO  LogCapture:health.py:52 -- Agreement INFO  LogCapture:health.py:53 Details: INFO  LogCapture:health.py:54 ----------- INFO  LogCapture:health.py:55 The replication agreement (003) under "dc=example,dc=com" is not in synchronization. 
INFO  LogCapture:health.py:56 Resolution: INFO  LogCapture:health.py:57 ----------- INFO  LogCapture:health.py:58 You may need to reinitialize this replication agreement. Please check the errors log for more information. If you do need to reinitialize the agreement you can do so using dsconf. Here is an example: # dsconf slapd-master1 repl-agmt init "003" --suffix dc=example,dc=com INFO  LogCapture:health.py:45 [3] DS Lint Error: DSCLLE0001 INFO  LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO  LogCapture:health.py:47 Severity: LOW INFO  LogCapture:health.py:49 Check: backends:userroot::cl_trimming INFO  LogCapture:health.py:50 Affects: INFO  LogCapture:health.py:52 -- Replication INFO  LogCapture:health.py:52 -- Changelog INFO  LogCapture:health.py:52 -- Backends INFO  LogCapture:health.py:53 Details: INFO  LogCapture:health.py:54 ----------- INFO  LogCapture:health.py:55 The replication changelog does have any kind of trimming configured. This will lead to the changelog size growing indefinitely. INFO  LogCapture:health.py:56 Resolution: INFO  LogCapture:health.py:57 ----------- INFO  LogCapture:health.py:58 Configure changelog trimming, preferably by setting the maximum age of a changelog record. Here is an example: # dsconf slapd-master1 replication set-changelog --suffix YOUR_SUFFIX --max-age 30d INFO  LogCapture:health.py:124 ===== End Of Report (3 Issues found) ===== INFO  LogCapture:health.py:126 [ { "dsle": "DSREPLLE0001", "severity": "HIGH", "description": "Replication agreement not set to be synchronized.", "items": [ "Replication", "Agreement" ], "detail": "The replication agreement (002) under \"dc=example,dc=com\" is not in synchronization.", "fix": "You may need to reinitialize this replication agreement. Please check the errors\nlog for more information. If you do need to reinitialize the agreement you can do so\nusing dsconf. 
Here is an example:\n\n # dsconf slapd-master1 repl-agmt init \"002\" --suffix dc=example,dc=com", "check": "replication:agmts_status" }, { "dsle": "DSREPLLE0001", "severity": "HIGH", "description": "Replication agreement not set to be synchronized.", "items": [ "Replication", "Agreement" ], "detail": "The replication agreement (003) under \"dc=example,dc=com\" is not in synchronization.", "fix": "You may need to reinitialize this replication agreement. Please check the errors\nlog for more information. If you do need to reinitialize the agreement you can do so\nusing dsconf. Here is an example:\n\n # dsconf slapd-master1 repl-agmt init \"003\" --suffix dc=example,dc=com", "check": "replication:agmts_status" }, { "dsle": "DSCLLE0001", "severity": "LOW", "description": "Changelog trimming not configured.", "items": [ "Replication", "Changelog", "Backends" ], "detail": "The replication changelog does have any kind of trimming configured. This will\nlead to the changelog size growing indefinitely.", "fix": "Configure changelog trimming, preferably by setting the maximum age of a changelog\nrecord. Here is an example:\n\n # dsconf slapd-master1 replication set-changelog --suffix YOUR_SUFFIX --max-age 30d", "check": "backends:userroot::cl_trimming" } ]
Passed suites/healthcheck/health_security_test.py::test_healthcheck_insecure_pwd_hash_configured 0.37
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking config:hr_timestamp ... INFO  LogCapture:health.py:99 Checking config:passwordscheme ... INFO  LogCapture:health.py:99 Checking encryption:check_tls_version ... INFO  LogCapture:health.py:99 Checking tls:certificate_expiration ... INFO  LogCapture:health.py:99 Checking fschecks:file_perms ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:119 1 Issue found! Generating report ... INFO  LogCapture:health.py:45 [1] DS Lint Error: DSCLE0002 INFO  LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO  LogCapture:health.py:47 Severity: HIGH INFO  LogCapture:health.py:49 Check: config:passwordscheme INFO  LogCapture:health.py:50 Affects: INFO  LogCapture:health.py:52 -- cn=config INFO  LogCapture:health.py:53 Details: INFO  LogCapture:health.py:54 ----------- INFO  LogCapture:health.py:55 Password storage schemes in Directory Server define how passwords are hashed via a one-way mathematical function for storage. Knowing the hash it is difficult to gain the input, but knowing the input you can easily compare the hash. Many hashes are well known for cryptograhpic verification properties, but are designed to be *fast* to validate. This is the opposite of what we desire for password storage. In the unlikely event of a disclosure, you want hashes to be *difficult* to verify, as this adds a cost of work to an attacker. In Directory Server, we offer one hash suitable for this (PBKDF2_SHA256) and one hash for "legacy" support (SSHA512). Your configuration does not use these for password storage or the root password storage scheme. 
INFO  LogCapture:health.py:56 Resolution: INFO  LogCapture:health.py:57 ----------- INFO  LogCapture:health.py:58 Perform a configuration reset of the values: passwordStorageScheme nsslapd-rootpwstoragescheme IE, stop Directory Server, and in dse.ldif delete these two lines. When Directory Server is started, they will use the server provided defaults that are secure. You can also use 'dsconf' to replace these values. Here is an example: # dsconf slapd-standalone1 config replace passwordStorageScheme=PBKDF2_SHA256 nsslapd-rootpwstoragescheme=PBKDF2_SHA256 INFO  LogCapture:health.py:124 ===== End Of Report (1 Issue found) ===== INFO  LogCapture:health.py:126 [ { "dsle": "DSCLE0002", "severity": "HIGH", "description": "Weak passwordStorageScheme.", "items": [ "cn=config" ], "detail": "Password storage schemes in Directory Server define how passwords are hashed via a\none-way mathematical function for storage. Knowing the hash it is difficult to gain\nthe input, but knowing the input you can easily compare the hash.\n\nMany hashes are well known for cryptograhpic verification properties, but are\ndesigned to be *fast* to validate. This is the opposite of what we desire for password\nstorage. In the unlikely event of a disclosure, you want hashes to be *difficult* to\nverify, as this adds a cost of work to an attacker.\n\nIn Directory Server, we offer one hash suitable for this (PBKDF2_SHA256) and one hash\nfor \"legacy\" support (SSHA512).\n\nYour configuration does not use these for password storage or the root password storage\nscheme.\n", "fix": "Perform a configuration reset of the values:\n\npasswordStorageScheme\nnsslapd-rootpwstoragescheme\n\nIE, stop Directory Server, and in dse.ldif delete these two lines. When Directory Server\nis started, they will use the server provided defaults that are secure.\n\nYou can also use 'dsconf' to replace these values. 
Here is an example:\n\n # dsconf slapd-standalone1 config replace passwordStorageScheme=PBKDF2_SHA256 nsslapd-rootpwstoragescheme=PBKDF2_SHA256", "check": "config:passwordscheme" } ] INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking config:hr_timestamp ... INFO  LogCapture:health.py:99 Checking config:passwordscheme ... INFO  LogCapture:health.py:99 Checking encryption:check_tls_version ... INFO  LogCapture:health.py:99 Checking tls:certificate_expiration ... INFO  LogCapture:health.py:99 Checking fschecks:file_perms ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:111 No issues found. INFO  LogCapture:health.py:113 []
Passed suites/healthcheck/health_security_test.py::test_healthcheck_min_allowed_tls_version_too_low 23.08
------------------------------Captured stdout call------------------------------
Setting system policy to LEGACY Note: System-wide crypto policies are applied on application start-up. It is recommended to restart the system for the change of policies to fully take place.
-------------------------------Captured log call--------------------------------
INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking config:hr_timestamp ... INFO  LogCapture:health.py:99 Checking config:passwordscheme ... INFO  LogCapture:health.py:99 Checking encryption:check_tls_version ... INFO  LogCapture:health.py:99 Checking tls:certificate_expiration ... INFO  LogCapture:health.py:99 Checking fschecks:file_perms ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:119 1 Issue found! Generating report ... INFO  LogCapture:health.py:45 [1] DS Lint Error: DSELE0001 INFO  LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO  LogCapture:health.py:47 Severity: MEDIUM INFO  LogCapture:health.py:49 Check: encryption:check_tls_version INFO  LogCapture:health.py:50 Affects: INFO  LogCapture:health.py:52 -- cn=encryption,cn=config INFO  LogCapture:health.py:53 Details: INFO  LogCapture:health.py:54 ----------- INFO  LogCapture:health.py:55 This Directory Server may not be using strong TLS protocol versions. TLS1.0 is known to have a number of issues with the protocol. Please see: https://tools.ietf.org/html/rfc7457 It is advised you set this value to the maximum possible. INFO  LogCapture:health.py:56 Resolution: INFO  LogCapture:health.py:57 ----------- INFO  LogCapture:health.py:58 There are two options for setting the TLS minimum version allowed. You, can set "sslVersionMin" in "cn=encryption,cn=config" to a version greater than "TLS1.0" You can also use 'dsconf' to set this value. Here is an example: # dsconf slapd-standalone1 security set --tls-protocol-min=TLS1.2 You must restart the Directory Server for this change to take effect. 
Or, you can set the system wide crypto policy to FUTURE which will use a higher TLS minimum version, but doing this affects the entire system: # update-crypto-policies --set FUTURE INFO  LogCapture:health.py:124 ===== End Of Report (1 Issue found) ===== INFO  LogCapture:health.py:126 [ { "dsle": "DSELE0001", "severity": "MEDIUM", "description": "Weak TLS protocol version.", "items": [ "cn=encryption,cn=config" ], "detail": "This Directory Server may not be using strong TLS protocol versions. TLS1.0 is known to\nhave a number of issues with the protocol. Please see:\n\nhttps://tools.ietf.org/html/rfc7457\n\nIt is advised you set this value to the maximum possible.", "fix": "There are two options for setting the TLS minimum version allowed. You,\ncan set \"sslVersionMin\" in \"cn=encryption,cn=config\" to a version greater than \"TLS1.0\"\nYou can also use 'dsconf' to set this value. Here is an example:\n\n # dsconf slapd-standalone1 security set --tls-protocol-min=TLS1.2\n\nYou must restart the Directory Server for this change to take effect.\n\nOr, you can set the system wide crypto policy to FUTURE which will use a higher TLS\nminimum version, but doing this affects the entire system:\n\n # update-crypto-policies --set FUTURE", "check": "encryption:check_tls_version" } ] INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking config:hr_timestamp ... INFO  LogCapture:health.py:99 Checking config:passwordscheme ... INFO  LogCapture:health.py:99 Checking encryption:check_tls_version ... INFO  LogCapture:health.py:99 Checking tls:certificate_expiration ... INFO  LogCapture:health.py:99 Checking fschecks:file_perms ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:111 No issues found. INFO  LogCapture:health.py:113 []
Passed suites/healthcheck/health_security_test.py::test_healthcheck_resolvconf_bad_file_perm 1.45
-------------------------------Captured log call--------------------------------
INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking config:hr_timestamp ... INFO  LogCapture:health.py:99 Checking config:passwordscheme ... INFO  LogCapture:health.py:99 Checking encryption:check_tls_version ... INFO  LogCapture:health.py:99 Checking tls:certificate_expiration ... INFO  LogCapture:health.py:99 Checking fschecks:file_perms ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:119 1 Issue found! Generating report ... INFO  LogCapture:health.py:45 [1] DS Lint Error: DSPERMLE0001 INFO  LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO  LogCapture:health.py:47 Severity: MEDIUM INFO  LogCapture:health.py:49 Check: fschecks:file_perms INFO  LogCapture:health.py:50 Affects: INFO  LogCapture:health.py:52 -- File Permissions INFO  LogCapture:health.py:52 -- /etc/resolv.conf INFO  LogCapture:health.py:53 Details: INFO  LogCapture:health.py:54 ----------- INFO  LogCapture:health.py:55 The file "/etc/resolv.conf" does not have the expected permissions (644). This can cause issues with replication and chaining. INFO  LogCapture:health.py:56 Resolution: INFO  LogCapture:health.py:57 ----------- INFO  LogCapture:health.py:58 Change the file permissions: # chmod 644 /etc/resolv.conf INFO  LogCapture:health.py:124 ===== End Of Report (1 Issue found) ===== INFO  LogCapture:health.py:126 [ { "dsle": "DSPERMLE0001", "severity": "MEDIUM", "description": "Incorrect file permissions.", "items": [ "File Permissions", "/etc/resolv.conf" ], "detail": "The file \"/etc/resolv.conf\" does not have the expected permissions (644). This\ncan cause issues with replication and chaining.", "fix": "Change the file permissions:\n\n # chmod 644 /etc/resolv.conf", "check": "fschecks:file_perms" } ] INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... 
INFO  LogCapture:health.py:99 Checking config:hr_timestamp ... INFO  LogCapture:health.py:99 Checking config:passwordscheme ... INFO  LogCapture:health.py:99 Checking encryption:check_tls_version ... INFO  LogCapture:health.py:99 Checking tls:certificate_expiration ... INFO  LogCapture:health.py:99 Checking fschecks:file_perms ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:111 No issues found. INFO  LogCapture:health.py:113 []
Passed suites/healthcheck/health_security_test.py::test_healthcheck_pwdfile_bad_file_perm 0.94
-------------------------------Captured log call--------------------------------
INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking config:hr_timestamp ... INFO  LogCapture:health.py:99 Checking config:passwordscheme ... INFO  LogCapture:health.py:99 Checking encryption:check_tls_version ... INFO  LogCapture:health.py:99 Checking tls:certificate_expiration ... INFO  LogCapture:health.py:99 Checking fschecks:file_perms ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:119 1 Issue found! Generating report ... INFO  LogCapture:health.py:45 [1] DS Lint Error: DSPERMLE0002 INFO  LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO  LogCapture:health.py:47 Severity: HIGH INFO  LogCapture:health.py:49 Check: fschecks:file_perms INFO  LogCapture:health.py:50 Affects: INFO  LogCapture:health.py:52 -- File Permissions INFO  LogCapture:health.py:52 -- /etc/dirsrv/slapd-standalone1/pwdfile.txt INFO  LogCapture:health.py:53 Details: INFO  LogCapture:health.py:54 ----------- INFO  LogCapture:health.py:55 The file "/etc/dirsrv/slapd-standalone1/pwdfile.txt" does not have the expected permissions (400). The security database pin/password files should only be readable by Directory Server user. INFO  LogCapture:health.py:56 Resolution: INFO  LogCapture:health.py:57 ----------- INFO  LogCapture:health.py:58 Change the file permissions: # chmod 400 /etc/dirsrv/slapd-standalone1/pwdfile.txt INFO  LogCapture:health.py:124 ===== End Of Report (1 Issue found) ===== INFO  LogCapture:health.py:126 [ { "dsle": "DSPERMLE0002", "severity": "HIGH", "description": "Incorrect security database file permissions.", "items": [ "File Permissions", "/etc/dirsrv/slapd-standalone1/pwdfile.txt" ], "detail": "The file \"/etc/dirsrv/slapd-standalone1/pwdfile.txt\" does not have the expected permissions (400). 
The\nsecurity database pin/password files should only be readable by Directory Server user.", "fix": "Change the file permissions:\n\n # chmod 400 /etc/dirsrv/slapd-standalone1/pwdfile.txt", "check": "fschecks:file_perms" } ] INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking config:hr_timestamp ... INFO  LogCapture:health.py:99 Checking config:passwordscheme ... INFO  LogCapture:health.py:99 Checking encryption:check_tls_version ... INFO  LogCapture:health.py:99 Checking tls:certificate_expiration ... INFO  LogCapture:health.py:99 Checking fschecks:file_perms ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:111 No issues found. INFO  LogCapture:health.py:113 []
Passed suites/healthcheck/health_sync_test.py::test_healthcheck_replication_out_of_sync_not_broken 32.16
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master3 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39003, 'ldap-secureport': 63703, 'server-id': 'master3', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... 
INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect cea14fb8-7a3e-4efa-9249-b7a1a8af0d50 / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect e340ba40-886a-4e80-8b4a-c0f42d75d059 / got description=cea14fb8-7a3e-4efa-9249-b7a1a8af0d50) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:156 Joining master master3 to master1 ... 
INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect eb5e4bc1-bf63-48f5-bffa-f126941de068 / got description=e340ba40-886a-4e80-8b4a-c0f42d75d059) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect d3b59b8f-f44c-49d2-85b1-7a5fe0e81ead / got description=eb5e4bc1-bf63-48f5-bffa-f126941de068) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect d3b59b8f-f44c-49d2-85b1-7a5fe0e81ead / got description=eb5e4bc1-bf63-48f5-bffa-f126941de068) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect d3b59b8f-f44c-49d2-85b1-7a5fe0e81ead / got description=eb5e4bc1-bf63-48f5-bffa-f126941de068) INFO  
lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master3 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master3 ... INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is was created INFO  lib389.topologies:topologies.py:164 Ensuring master master3 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master3 to master2 ... 
INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 was created
-------------------------------Captured log call--------------------------------
INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking replication:agmts_status ... INFO  LogCapture:health.py:99 Checking replication:conflicts ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:119 1 Issue found! Generating report ... INFO  LogCapture:health.py:45 [1] DS Lint Error: DSREPLLE0003 INFO  LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO  LogCapture:health.py:47 Severity: MEDIUM INFO  LogCapture:health.py:49 Check: replication:agmts_status INFO  LogCapture:health.py:50 Affects: INFO  LogCapture:health.py:52 -- Replication INFO  LogCapture:health.py:52 -- Agreement INFO  LogCapture:health.py:53 Details: INFO  LogCapture:health.py:54 ----------- INFO  LogCapture:health.py:55 The replication agreement (001) under "dc=example,dc=com" is not in synchronization. Status message: error (1) can't acquire busy replica (unable to acquire replica: the replica is currently being updated by another supplier.) INFO  LogCapture:health.py:56 Resolution: INFO  LogCapture:health.py:57 ----------- INFO  LogCapture:health.py:58 Replication is not in synchronization but it may recover. Continue to monitor this agreement. INFO  LogCapture:health.py:124 ===== End Of Report (1 Issue found) ===== INFO  LogCapture:health.py:126 [ { "dsle": "DSREPLLE0003", "severity": "MEDIUM", "description": "Unsynchronized replication agreement.", "items": [ "Replication", "Agreement" ], "detail": "The replication agreement (001) under \"dc=example,dc=com\" is not in synchronization.\nStatus message: error (1) can't acquire busy replica (unable to acquire replica: the replica is currently being updated by another supplier.)", "fix": "Replication is not in synchronization but it may recover. Continue to\nmonitor this agreement.", "check": "replication:agmts_status" } ]
Passed suites/healthcheck/healthcheck_test.py::test_healthcheck_disabled_suffix 1.35
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking config:hr_timestamp ... INFO  LogCapture:health.py:99 Checking config:passwordscheme ... INFO  LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO  LogCapture:health.py:99 Checking backends:userroot:mappingtree ... INFO  LogCapture:health.py:99 Checking backends:userroot:search ... INFO  LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... INFO  LogCapture:health.py:99 Checking encryption:check_tls_version ... INFO  LogCapture:health.py:99 Checking fschecks:file_perms ... INFO  LogCapture:health.py:99 Checking refint:attr_indexes ... INFO  LogCapture:health.py:99 Checking refint:update_delay ... INFO  LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... INFO  LogCapture:health.py:99 Checking replication:agmts_status ... INFO  LogCapture:health.py:99 Checking replication:conflicts ... INFO  LogCapture:health.py:99 Checking dseldif:nsstate ... INFO  LogCapture:health.py:99 Checking tls:certificate_expiration ... INFO  LogCapture:health.py:99 Checking logs:notes ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:119 2 Issues found! Generating report ... INFO  LogCapture:health.py:45 [1] DS Lint Error: DSBLE0001 INFO  LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO  LogCapture:health.py:47 Severity: MEDIUM INFO  LogCapture:health.py:49 Check: backends:userroot:mappingtree INFO  LogCapture:health.py:50 Affects: INFO  LogCapture:health.py:52 -- userroot INFO  LogCapture:health.py:53 Details: INFO  LogCapture:health.py:54 ----------- INFO  LogCapture:health.py:55 This backend may be missing the correct mapping tree references. Mapping Trees allow the directory server to determine which backend an operation is routed to in the abscence of other information. 
This is extremely important for correct functioning of LDAP ADD for example. A correct Mapping tree for this backend must contain the suffix name, the database name and be a backend type. IE: cn=o3Dexample,cn=mapping tree,cn=config cn: o=example nsslapd-backend: userRoot nsslapd-state: backend objectClass: top objectClass: extensibleObject objectClass: nsMappingTree INFO  LogCapture:health.py:56 Resolution: INFO  LogCapture:health.py:57 ----------- INFO  LogCapture:health.py:58 Either you need to create the mapping tree, or you need to repair the related mapping tree. You will need to do this by hand by editing cn=config, or stopping the instance and editing dse.ldif. INFO  LogCapture:health.py:45 [2] DS Lint Error: DSBLE0002 INFO  LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO  LogCapture:health.py:47 Severity: HIGH INFO  LogCapture:health.py:49 Check: backends:userroot:search INFO  LogCapture:health.py:50 Affects: INFO  LogCapture:health.py:52 -- dc=example,dc=com INFO  LogCapture:health.py:53 Details: INFO  LogCapture:health.py:54 ----------- INFO  LogCapture:health.py:55 Unable to query the backend. LDAP error ({'msgtype': 101, 'msgid': 26, 'result': 1, 'desc': 'Operations error', 'ctrls': [], 'info': 'Warning: Operation attempted on a disabled node : dc=example,dc=com\n'}) INFO  LogCapture:health.py:56 Resolution: INFO  LogCapture:health.py:57 ----------- INFO  LogCapture:health.py:58 Check the server's error and access logs for more information. INFO  LogCapture:health.py:124 ===== End Of Report (2 Issues found) ===== INFO  LogCapture:health.py:126 [ { "dsle": "DSBLE0001", "severity": "MEDIUM", "description": "Possibly incorrect mapping tree.", "items": [ "userroot" ], "detail": "This backend may be missing the correct mapping tree references. Mapping Trees allow\nthe directory server to determine which backend an operation is routed to in the\nabscence of other information. 
This is extremely important for correct functioning\nof LDAP ADD for example.\n\nA correct Mapping tree for this backend must contain the suffix name, the database name\nand be a backend type. IE:\n\ncn=o3Dexample,cn=mapping tree,cn=config\ncn: o=example\nnsslapd-backend: userRoot\nnsslapd-state: backend\nobjectClass: top\nobjectClass: extensibleObject\nobjectClass: nsMappingTree\n\n", "fix": "Either you need to create the mapping tree, or you need to repair the related\nmapping tree. You will need to do this by hand by editing cn=config, or stopping\nthe instance and editing dse.ldif.\n", "check": "backends:userroot:mappingtree" }, { "dsle": "DSBLE0002", "severity": "HIGH", "description": "Unable to query backend.", "items": [ "dc=example,dc=com" ], "detail": "Unable to query the backend. LDAP error ({'msgtype': 101, 'msgid': 26, 'result': 1, 'desc': 'Operations error', 'ctrls': [], 'info': 'Warning: Operation attempted on a disabled node : dc=example,dc=com\\n'})", "fix": "Check the server's error and access logs for more information.", "check": "backends:userroot:search" } ]
Passed suites/healthcheck/healthcheck_test.py::test_healthcheck_standalone 0.46
-------------------------------Captured log call--------------------------------
INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking config:hr_timestamp ... INFO  LogCapture:health.py:99 Checking config:passwordscheme ... INFO  LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO  LogCapture:health.py:99 Checking backends:userroot:mappingtree ... INFO  LogCapture:health.py:99 Checking backends:userroot:search ... INFO  LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... INFO  LogCapture:health.py:99 Checking encryption:check_tls_version ... INFO  LogCapture:health.py:99 Checking fschecks:file_perms ... INFO  LogCapture:health.py:99 Checking refint:attr_indexes ... INFO  LogCapture:health.py:99 Checking refint:update_delay ... INFO  LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... INFO  LogCapture:health.py:99 Checking replication:agmts_status ... INFO  LogCapture:health.py:99 Checking replication:conflicts ... INFO  LogCapture:health.py:99 Checking dseldif:nsstate ... INFO  LogCapture:health.py:99 Checking tls:certificate_expiration ... INFO  LogCapture:health.py:99 Checking logs:notes ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:111 No issues found. INFO  LogCapture:health.py:113 []
Passed suites/healthcheck/healthcheck_test.py::test_healthcheck_list_checks 0.03
-------------------------------Captured log call--------------------------------
INFO  LogCapture:health.py:90 config:hr_timestamp INFO  LogCapture:health.py:90 config:passwordscheme INFO  LogCapture:health.py:90 backends:userroot:cl_trimming INFO  LogCapture:health.py:90 backends:userroot:mappingtree INFO  LogCapture:health.py:90 backends:userroot:search INFO  LogCapture:health.py:90 backends:userroot:virt_attrs INFO  LogCapture:health.py:90 encryption:check_tls_version INFO  LogCapture:health.py:90 fschecks:file_perms INFO  LogCapture:health.py:90 refint:attr_indexes INFO  LogCapture:health.py:90 refint:update_delay INFO  LogCapture:health.py:90 monitor-disk-space:disk_space INFO  LogCapture:health.py:90 replication:agmts_status INFO  LogCapture:health.py:90 replication:conflicts INFO  LogCapture:health.py:90 dseldif:nsstate INFO  LogCapture:health.py:90 tls:certificate_expiration INFO  LogCapture:health.py:90 logs:notes
Passed suites/healthcheck/healthcheck_test.py::test_healthcheck_list_errors 0.01
-------------------------------Captured log call--------------------------------
INFO  LogCapture:health.py:71 DSBLE0001 :: Possibly incorrect mapping tree. INFO  LogCapture:health.py:71 DSBLE0002 :: Unable to query backend. INFO  LogCapture:health.py:71 DSBLE0003 :: Uninitialized backend database. INFO  LogCapture:health.py:71 DSCERTLE0001 :: Certificate about to expire. INFO  LogCapture:health.py:71 DSCERTLE0002 :: Certificate expired. INFO  LogCapture:health.py:71 DSCLE0001 :: Different log timestamp format. INFO  LogCapture:health.py:71 DSCLE0002 :: Weak passwordStorageScheme. INFO  LogCapture:health.py:71 DSCLLE0001 :: Changelog trimming not configured. INFO  LogCapture:health.py:71 DSDSLE0001 :: Low disk space. INFO  LogCapture:health.py:71 DSELE0001 :: Weak TLS protocol version. INFO  LogCapture:health.py:71 DSLOGNOTES0001 :: Unindexed Search INFO  LogCapture:health.py:71 DSLOGNOTES0002 :: Unknown Attribute In Filter INFO  LogCapture:health.py:71 DSPERMLE0001 :: Incorrect file permissions. INFO  LogCapture:health.py:71 DSPERMLE0002 :: Incorrect security database file permissions. INFO  LogCapture:health.py:71 DSREPLLE0001 :: Replication agreement not set to be synchronized. INFO  LogCapture:health.py:71 DSREPLLE0002 :: Replication conflict entries found. INFO  LogCapture:health.py:71 DSREPLLE0003 :: Unsynchronized replication agreement. INFO  LogCapture:health.py:71 DSREPLLE0004 :: Unable to get replication agreement status. INFO  LogCapture:health.py:71 DSREPLLE0005 :: Replication consumer not reachable. INFO  LogCapture:health.py:71 DSRILE0001 :: Referential integrity plugin may be slower. INFO  LogCapture:health.py:71 DSRILE0002 :: Referential integrity plugin configured with unindexed attribute. INFO  LogCapture:health.py:71 DSSKEWLE0001 :: Medium time skew. INFO  LogCapture:health.py:71 DSSKEWLE0002 :: Major time skew. INFO  LogCapture:health.py:71 DSSKEWLE0003 :: Extensive time skew. INFO  LogCapture:health.py:71 DSVIRTLE0001 :: Virtual attribute indexed.
Passed suites/healthcheck/healthcheck_test.py::test_healthcheck_check_option 2.40
-------------------------------Captured log call--------------------------------
INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking config:hr_timestamp ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:111 No issues found. INFO  LogCapture:health.py:113 [] INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking config:passwordscheme ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:111 No issues found. INFO  LogCapture:health.py:113 [] INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:111 No issues found. INFO  LogCapture:health.py:113 [] INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking backends:userroot:mappingtree ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:111 No issues found. INFO  LogCapture:health.py:113 [] INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking backends:userroot:search ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:111 No issues found. INFO  LogCapture:health.py:113 [] INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:111 No issues found. INFO  LogCapture:health.py:113 [] INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking encryption:check_tls_version ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:111 No issues found. 
INFO  LogCapture:health.py:113 [] INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking fschecks:file_perms ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:111 No issues found. INFO  LogCapture:health.py:113 [] INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking refint:attr_indexes ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:111 No issues found. INFO  LogCapture:health.py:113 [] INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking refint:update_delay ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:111 No issues found. INFO  LogCapture:health.py:113 [] INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:111 No issues found. INFO  LogCapture:health.py:113 [] INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking replication:agmts_status ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:111 No issues found. INFO  LogCapture:health.py:113 [] INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking replication:conflicts ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:111 No issues found. INFO  LogCapture:health.py:113 [] INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking dseldif:nsstate ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:111 No issues found. 
INFO  LogCapture:health.py:113 [] INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking tls:certificate_expiration ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:111 No issues found. INFO  LogCapture:health.py:113 [] INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking logs:notes ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:111 No issues found. INFO  LogCapture:health.py:113 []
Passed suites/healthcheck/healthcheck_test.py::test_healthcheck_standalone_tls 9.74
-------------------------------Captured log call--------------------------------
INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking config:hr_timestamp ... INFO  LogCapture:health.py:99 Checking config:passwordscheme ... INFO  LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO  LogCapture:health.py:99 Checking backends:userroot:mappingtree ... INFO  LogCapture:health.py:99 Checking backends:userroot:search ... INFO  LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... INFO  LogCapture:health.py:99 Checking encryption:check_tls_version ... INFO  LogCapture:health.py:99 Checking fschecks:file_perms ... INFO  LogCapture:health.py:99 Checking refint:attr_indexes ... INFO  LogCapture:health.py:99 Checking refint:update_delay ... INFO  LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... INFO  LogCapture:health.py:99 Checking replication:agmts_status ... INFO  LogCapture:health.py:99 Checking replication:conflicts ... INFO  LogCapture:health.py:99 Checking dseldif:nsstate ... INFO  LogCapture:health.py:99 Checking tls:certificate_expiration ... INFO  LogCapture:health.py:99 Checking logs:notes ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:111 No issues found. INFO  LogCapture:health.py:113 []
Passed suites/healthcheck/healthcheck_test.py::test_healthcheck_replication 2.51
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect beb09e42-dc1f-41a1-83c0-7ee95cc7ec33 / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working 
(expect 4246b7ee-6fa1-44f5-9265-c7d95b9b0927 / got description=beb09e42-dc1f-41a1-83c0-7ee95cc7ec33) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists
-------------------------------Captured log call--------------------------------
INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking config:hr_timestamp ... INFO  LogCapture:health.py:99 Checking config:passwordscheme ... INFO  LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO  LogCapture:health.py:99 Checking backends:userroot:mappingtree ... INFO  LogCapture:health.py:99 Checking backends:userroot:search ... INFO  LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... INFO  LogCapture:health.py:99 Checking encryption:check_tls_version ... INFO  LogCapture:health.py:99 Checking fschecks:file_perms ... INFO  LogCapture:health.py:99 Checking refint:attr_indexes ... INFO  LogCapture:health.py:99 Checking refint:update_delay ... INFO  LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... INFO  LogCapture:health.py:99 Checking replication:agmts_status ... INFO  LogCapture:health.py:99 Checking replication:conflicts ... INFO  LogCapture:health.py:99 Checking dseldif:nsstate ... INFO  LogCapture:health.py:99 Checking tls:certificate_expiration ... INFO  LogCapture:health.py:99 Checking logs:notes ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:111 No issues found. INFO  LogCapture:health.py:113 [] INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking config:hr_timestamp ... INFO  LogCapture:health.py:99 Checking config:passwordscheme ... INFO  LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO  LogCapture:health.py:99 Checking backends:userroot:mappingtree ... INFO  LogCapture:health.py:99 Checking backends:userroot:search ... INFO  LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... INFO  LogCapture:health.py:99 Checking encryption:check_tls_version ... INFO  LogCapture:health.py:99 Checking fschecks:file_perms ... INFO  LogCapture:health.py:99 Checking refint:attr_indexes ... 
INFO  LogCapture:health.py:99 Checking refint:update_delay ... INFO  LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... INFO  LogCapture:health.py:99 Checking replication:agmts_status ... INFO  LogCapture:health.py:99 Checking replication:conflicts ... INFO  LogCapture:health.py:99 Checking dseldif:nsstate ... INFO  LogCapture:health.py:99 Checking tls:certificate_expiration ... INFO  LogCapture:health.py:99 Checking logs:notes ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:111 No issues found. INFO  LogCapture:health.py:113 []
Passed suites/healthcheck/healthcheck_test.py::test_healthcheck_replication_tls 23.87
-------------------------------Captured log call--------------------------------
INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking config:hr_timestamp ... INFO  LogCapture:health.py:99 Checking config:passwordscheme ... INFO  LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO  LogCapture:health.py:99 Checking backends:userroot:mappingtree ... INFO  LogCapture:health.py:99 Checking backends:userroot:search ... INFO  LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... INFO  LogCapture:health.py:99 Checking encryption:check_tls_version ... INFO  LogCapture:health.py:99 Checking fschecks:file_perms ... INFO  LogCapture:health.py:99 Checking refint:attr_indexes ... INFO  LogCapture:health.py:99 Checking refint:update_delay ... INFO  LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... INFO  LogCapture:health.py:99 Checking replication:agmts_status ... INFO  LogCapture:health.py:99 Checking replication:conflicts ... INFO  LogCapture:health.py:99 Checking dseldif:nsstate ... INFO  LogCapture:health.py:99 Checking tls:certificate_expiration ... INFO  LogCapture:health.py:99 Checking logs:notes ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:111 No issues found. INFO  LogCapture:health.py:113 [] INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking config:hr_timestamp ... INFO  LogCapture:health.py:99 Checking config:passwordscheme ... INFO  LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO  LogCapture:health.py:99 Checking backends:userroot:mappingtree ... INFO  LogCapture:health.py:99 Checking backends:userroot:search ... INFO  LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... INFO  LogCapture:health.py:99 Checking encryption:check_tls_version ... INFO  LogCapture:health.py:99 Checking fschecks:file_perms ... INFO  LogCapture:health.py:99 Checking refint:attr_indexes ... 
INFO  LogCapture:health.py:99 Checking refint:update_delay ... INFO  LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... INFO  LogCapture:health.py:99 Checking replication:agmts_status ... INFO  LogCapture:health.py:99 Checking replication:conflicts ... INFO  LogCapture:health.py:99 Checking dseldif:nsstate ... INFO  LogCapture:health.py:99 Checking tls:certificate_expiration ... INFO  LogCapture:health.py:99 Checking logs:notes ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:111 No issues found. INFO  LogCapture:health.py:113 []
Passed suites/healthcheck/healthcheck_test.py::test_healthcheck_backend_missing_mapping_tree 3.93
-------------------------------Captured log call--------------------------------
INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking config:hr_timestamp ... INFO  LogCapture:health.py:99 Checking config:passwordscheme ... INFO  LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO  LogCapture:health.py:99 Checking backends:userroot:mappingtree ... INFO  LogCapture:health.py:99 Checking backends:userroot:search ... INFO  LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... INFO  LogCapture:health.py:99 Checking encryption:check_tls_version ... INFO  LogCapture:health.py:99 Checking fschecks:file_perms ... INFO  LogCapture:health.py:99 Checking refint:attr_indexes ... INFO  LogCapture:health.py:99 Checking refint:update_delay ... INFO  LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... INFO  LogCapture:health.py:99 Checking replication:agmts_status ... INFO  LogCapture:health.py:99 Checking replication:conflicts ... INFO  LogCapture:health.py:99 Checking dseldif:nsstate ... INFO  LogCapture:health.py:99 Checking tls:certificate_expiration ... INFO  LogCapture:health.py:99 Checking logs:notes ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:119 2 Issues found! Generating report ... INFO  LogCapture:health.py:45 [1] DS Lint Error: DSBLE0001 INFO  LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO  LogCapture:health.py:47 Severity: MEDIUM INFO  LogCapture:health.py:49 Check: backends:userroot:mappingtree INFO  LogCapture:health.py:50 Affects: INFO  LogCapture:health.py:52 -- userroot INFO  LogCapture:health.py:53 Details: INFO  LogCapture:health.py:54 ----------- INFO  LogCapture:health.py:55 This backend may be missing the correct mapping tree references. Mapping Trees allow the directory server to determine which backend an operation is routed to in the abscence of other information. 
This is extremely important for correct functioning of LDAP ADD for example. A correct Mapping tree for this backend must contain the suffix name, the database name and be a backend type. IE: cn=o3Dexample,cn=mapping tree,cn=config cn: o=example nsslapd-backend: userRoot nsslapd-state: backend objectClass: top objectClass: extensibleObject objectClass: nsMappingTree INFO  LogCapture:health.py:56 Resolution: INFO  LogCapture:health.py:57 ----------- INFO  LogCapture:health.py:58 Either you need to create the mapping tree, or you need to repair the related mapping tree. You will need to do this by hand by editing cn=config, or stopping the instance and editing dse.ldif. INFO  LogCapture:health.py:45 [2] DS Lint Error: DSBLE0003 INFO  LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO  LogCapture:health.py:47 Severity: LOW INFO  LogCapture:health.py:49 Check: backends:userroot:search INFO  LogCapture:health.py:50 Affects: INFO  LogCapture:health.py:52 -- dc=example,dc=com INFO  LogCapture:health.py:53 Details: INFO  LogCapture:health.py:54 ----------- INFO  LogCapture:health.py:55 The backend database has not been initialized yet INFO  LogCapture:health.py:56 Resolution: INFO  LogCapture:health.py:57 ----------- INFO  LogCapture:health.py:58 You need to import an LDIF file, or create the suffix entry, in order to initialize the database. INFO  LogCapture:health.py:124 ===== End Of Report (2 Issues found) ===== INFO  LogCapture:health.py:126 [ { "dsle": "DSBLE0001", "severity": "MEDIUM", "description": "Possibly incorrect mapping tree.", "items": [ "userroot" ], "detail": "This backend may be missing the correct mapping tree references. Mapping Trees allow\nthe directory server to determine which backend an operation is routed to in the\nabscence of other information. 
This is extremely important for correct functioning\nof LDAP ADD for example.\n\nA correct Mapping tree for this backend must contain the suffix name, the database name\nand be a backend type. IE:\n\ncn=o3Dexample,cn=mapping tree,cn=config\ncn: o=example\nnsslapd-backend: userRoot\nnsslapd-state: backend\nobjectClass: top\nobjectClass: extensibleObject\nobjectClass: nsMappingTree\n\n", "fix": "Either you need to create the mapping tree, or you need to repair the related\nmapping tree. You will need to do this by hand by editing cn=config, or stopping\nthe instance and editing dse.ldif.\n", "check": "backends:userroot:mappingtree" }, { "dsle": "DSBLE0003", "severity": "LOW", "description": "Uninitialized backend database.", "items": [ "dc=example,dc=com" ], "detail": "The backend database has not been initialized yet", "fix": "You need to import an LDIF file, or create the suffix entry, in order to initialize the database.", "check": "backends:userroot:search" } ] INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking config:hr_timestamp ... INFO  LogCapture:health.py:99 Checking config:passwordscheme ... INFO  LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO  LogCapture:health.py:99 Checking backends:userroot:mappingtree ... INFO  LogCapture:health.py:99 Checking backends:userroot:search ... INFO  LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... INFO  LogCapture:health.py:99 Checking encryption:check_tls_version ... INFO  LogCapture:health.py:99 Checking fschecks:file_perms ... INFO  LogCapture:health.py:99 Checking refint:attr_indexes ... INFO  LogCapture:health.py:99 Checking refint:update_delay ... INFO  LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... INFO  LogCapture:health.py:99 Checking replication:agmts_status ... INFO  LogCapture:health.py:99 Checking replication:conflicts ... INFO  LogCapture:health.py:99 Checking dseldif:nsstate ... 
INFO  LogCapture:health.py:99 Checking tls:certificate_expiration ... INFO  LogCapture:health.py:99 Checking logs:notes ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:111 No issues found. INFO  LogCapture:health.py:113 []
Passed suites/healthcheck/healthcheck_test.py::test_healthcheck_database_not_initialized 0.23
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1'} was created.
-------------------------------Captured log call--------------------------------
INFO  LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO  LogCapture:health.py:99 Checking config:hr_timestamp ... INFO  LogCapture:health.py:99 Checking config:passwordscheme ... INFO  LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO  LogCapture:health.py:99 Checking backends:userroot:mappingtree ... INFO  LogCapture:health.py:99 Checking backends:userroot:search ... INFO  LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... INFO  LogCapture:health.py:99 Checking encryption:check_tls_version ... INFO  LogCapture:health.py:99 Checking fschecks:file_perms ... INFO  LogCapture:health.py:99 Checking refint:attr_indexes ... INFO  LogCapture:health.py:99 Checking refint:update_delay ... INFO  LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... INFO  LogCapture:health.py:99 Checking replication:agmts_status ... INFO  LogCapture:health.py:99 Checking replication:conflicts ... INFO  LogCapture:health.py:99 Checking dseldif:nsstate ... INFO  LogCapture:health.py:99 Checking tls:certificate_expiration ... INFO  LogCapture:health.py:99 Checking logs:notes ... INFO  LogCapture:health.py:106 Healthcheck complete. INFO  LogCapture:health.py:119 1 Issue found! Generating report ... 
INFO  LogCapture:health.py:45 [1] DS Lint Error: DSBLE0003 INFO  LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO  LogCapture:health.py:47 Severity: LOW INFO  LogCapture:health.py:49 Check: backends:userroot:search INFO  LogCapture:health.py:50 Affects: INFO  LogCapture:health.py:52 -- dc=example,dc=com INFO  LogCapture:health.py:53 Details: INFO  LogCapture:health.py:54 ----------- INFO  LogCapture:health.py:55 The backend database has not been initialized yet INFO  LogCapture:health.py:56 Resolution: INFO  LogCapture:health.py:57 ----------- INFO  LogCapture:health.py:58 You need to import an LDIF file, or create the suffix entry, in order to initialize the database. INFO  LogCapture:health.py:124 ===== End Of Report (1 Issue found) ===== INFO  LogCapture:health.py:126 [ { "dsle": "DSBLE0003", "severity": "LOW", "description": "Uninitialized backend database.", "items": [ "dc=example,dc=com" ], "detail": "The backend database has not been initialized yet", "fix": "You need to import an LDIF file, or create the suffix entry, in order to initialize the database.", "check": "backends:userroot:search" } ]
Passed suites/import/import_test.py::test_import_with_index 6.83
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/import/import_test.py::test_crash_on_ldif2db 6.14
No log output captured.
Passed suites/import/import_test.py::test_ldif2db_allows_entries_without_a_parent_to_be_imported 4.52
-------------------------------Captured log call--------------------------------
ERROR  lib389:__init__.py:2647 ldif2db: Can't find file: /var/lib/dirsrv/slapd-standalone1/ldif/bogus.ldif
Passed suites/import/import_test.py::test_issue_a_warning_if_the_cache_size_is_smaller 7.16
No log output captured.
Passed suites/import/import_test.py::test_entry_with_escaped_characters_fails_to_import_and_index 12.66
------------------------------Captured stderr call------------------------------
[28/Oct/2020:21:31:02.263496282 -0400] - INFO - ldbm_instance_config_cachememsize_set - force a minimal value 512000 [28/Oct/2020:21:31:02.273426926 -0400] - INFO - check_and_set_import_cache - pagesize: 4096, available bytes 7503233024, process usage 22798336 [28/Oct/2020:21:31:02.276462463 -0400] - INFO - check_and_set_import_cache - Import allocates 16384KB import cache. [28/Oct/2020:21:31:02.279891278 -0400] - INFO - bdb_copy_directory - Backing up file 0 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T21:31:02.136702/userRoot/givenName.db) [28/Oct/2020:21:31:02.282708002 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/givenName.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T21:31:02.136702/userRoot/givenName.db [28/Oct/2020:21:31:02.285728804 -0400] - INFO - bdb_copy_directory - Backing up file 1 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T21:31:02.136702/userRoot/aci.db) [28/Oct/2020:21:31:02.288330342 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/aci.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T21:31:02.136702/userRoot/aci.db [28/Oct/2020:21:31:02.290870692 -0400] - INFO - bdb_copy_directory - Backing up file 2 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T21:31:02.136702/userRoot/sn.db) [28/Oct/2020:21:31:02.293649021 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/sn.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T21:31:02.136702/userRoot/sn.db [28/Oct/2020:21:31:02.296632912 -0400] - INFO - bdb_copy_directory - Backing up file 3 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T21:31:02.136702/userRoot/numsubordinates.db) [28/Oct/2020:21:31:02.302322183 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/numsubordinates.db to 
/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T21:31:02.136702/userRoot/numsubordinates.db [28/Oct/2020:21:31:02.305482138 -0400] - INFO - bdb_copy_directory - Backing up file 4 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T21:31:02.136702/userRoot/nsuniqueid.db) [28/Oct/2020:21:31:02.308869087 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/nsuniqueid.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T21:31:02.136702/userRoot/nsuniqueid.db [28/Oct/2020:21:31:02.311562014 -0400] - INFO - bdb_copy_directory - Backing up file 5 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T21:31:02.136702/userRoot/ancestorid.db) [28/Oct/2020:21:31:02.315036510 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/ancestorid.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T21:31:02.136702/userRoot/ancestorid.db [28/Oct/2020:21:31:02.318676024 -0400] - INFO - bdb_copy_directory - Backing up file 6 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T21:31:02.136702/userRoot/parentid.db) [28/Oct/2020:21:31:02.327890909 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/parentid.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T21:31:02.136702/userRoot/parentid.db [28/Oct/2020:21:31:02.330827538 -0400] - INFO - bdb_copy_directory - Backing up file 7 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T21:31:02.136702/userRoot/mail.db) [28/Oct/2020:21:31:02.334248117 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/mail.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T21:31:02.136702/userRoot/mail.db [28/Oct/2020:21:31:02.336816858 -0400] - INFO - bdb_copy_directory - Backing up file 8 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T21:31:02.136702/userRoot/telephoneNumber.db) [28/Oct/2020:21:31:02.339231934 -0400] - INFO - 
dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/telephoneNumber.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T21:31:02.136702/userRoot/telephoneNumber.db [28/Oct/2020:21:31:02.342018338 -0400] - INFO - bdb_copy_directory - Backing up file 9 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T21:31:02.136702/userRoot/cn.db) [28/Oct/2020:21:31:02.345336591 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/cn.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T21:31:02.136702/userRoot/cn.db [28/Oct/2020:21:31:02.348115138 -0400] - INFO - bdb_copy_directory - Backing up file 10 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T21:31:02.136702/userRoot/roomNumber.db) [28/Oct/2020:21:31:02.352896511 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/roomNumber.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T21:31:02.136702/userRoot/roomNumber.db [28/Oct/2020:21:31:02.355636984 -0400] - INFO - bdb_copy_directory - Backing up file 11 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T21:31:02.136702/userRoot/DBVERSION) [28/Oct/2020:21:31:02.358363808 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/DBVERSION to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T21:31:02.136702/userRoot/DBVERSION [28/Oct/2020:21:31:02.360963170 -0400] - INFO - bdb_copy_directory - Backing up file 12 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T21:31:02.136702/userRoot/uid.db) [28/Oct/2020:21:31:02.364069450 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/uid.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T21:31:02.136702/userRoot/uid.db [28/Oct/2020:21:31:02.366578286 -0400] - INFO - bdb_copy_directory - Backing up file 13 
(/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T21:31:02.136702/userRoot/entryrdn.db) [28/Oct/2020:21:31:02.369440944 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/entryrdn.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T21:31:02.136702/userRoot/entryrdn.db [28/Oct/2020:21:31:02.372170510 -0400] - INFO - bdb_copy_directory - Backing up file 14 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T21:31:02.136702/userRoot/id2entry.db) [28/Oct/2020:21:31:02.374632711 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/id2entry.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T21:31:02.136702/userRoot/id2entry.db [28/Oct/2020:21:31:02.377136542 -0400] - INFO - bdb_copy_directory - Backing up file 15 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T21:31:02.136702/userRoot/objectclass.db) [28/Oct/2020:21:31:02.379720722 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/objectclass.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T21:31:02.136702/userRoot/objectclass.db [28/Oct/2020:21:31:02.382845302 -0400] - INFO - upgradedb_core - userRoot: Start upgradedb. [28/Oct/2020:21:31:02.385497125 -0400] - INFO - check_and_set_import_cache - pagesize: 4096, available bytes 7503151104, process usage 22798336 [28/Oct/2020:21:31:02.387571926 -0400] - INFO - check_and_set_import_cache - Import allocates 16384KB import cache. [28/Oct/2020:21:31:02.416176249 -0400] - INFO - bdb_import_main - reindex userRoot: Index buffering enabled with bucket size 17 [28/Oct/2020:21:31:03.121491778 -0400] - INFO - import_monitor_threads - reindex userRoot: Workers finished; cleaning up... [28/Oct/2020:21:31:03.326170878 -0400] - INFO - import_monitor_threads - reindex userRoot: Workers cleaned up. [28/Oct/2020:21:31:03.330389793 -0400] - INFO - bdb_import_main - reindex userRoot: Cleaning up producer thread... 
[28/Oct/2020:21:31:03.334079055 -0400] - INFO - bdb_import_main - reindex userRoot: Indexing complete. Post-processing... [28/Oct/2020:21:31:03.337760610 -0400] - INFO - bdb_import_main - reindex userRoot: Generating numsubordinates (this may take several minutes to complete)... [28/Oct/2020:21:31:03.341603294 -0400] - INFO - bdb_import_main - reindex userRoot: Generating numSubordinates complete. [28/Oct/2020:21:31:03.345119183 -0400] - INFO - bdb_get_nonleaf_ids - reindex userRoot: Gathering ancestorid non-leaf IDs... [28/Oct/2020:21:31:03.348818172 -0400] - INFO - bdb_get_nonleaf_ids - reindex userRoot: Finished gathering ancestorid non-leaf IDs. [28/Oct/2020:21:31:03.352454651 -0400] - INFO - ldbm_get_nonleaf_ids - reindex userRoot: Starting sort of ancestorid non-leaf IDs... [28/Oct/2020:21:31:03.356723595 -0400] - INFO - ldbm_get_nonleaf_ids - reindex userRoot: Finished sort of ancestorid non-leaf IDs. [28/Oct/2020:21:31:03.364418939 -0400] - INFO - bdb_ancestorid_new_idl_create_index - reindex userRoot: Creating ancestorid index (new idl)... [28/Oct/2020:21:31:03.368597006 -0400] - INFO - bdb_ancestorid_new_idl_create_index - reindex userRoot: Created ancestorid index (new idl). [28/Oct/2020:21:31:03.372342750 -0400] - INFO - bdb_import_main - reindex userRoot: Flushing caches... [28/Oct/2020:21:31:03.375927909 -0400] - INFO - bdb_import_main - reindex userRoot: Closing files... [28/Oct/2020:21:31:03.397355633 -0400] - INFO - bdb_import_main - reindex userRoot: Reindexing complete. Processed 15 entries in 1 seconds. 
(15.00 entries/sec) [28/Oct/2020:21:31:03.407864247 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/log.0000000001 to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T21:31:02.136702/log.0000000001 [28/Oct/2020:21:31:03.435838020 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/DBVERSION to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2020-10-28T21:31:02.136702/DBVERSION [28/Oct/2020:21:31:03.441104250 -0400] - INFO - bdb_pre_close - All database threads now stopped
Passed suites/import/regression_test.py::test_replay_import_operation 32.46
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.suites.import.regression_test:regression_test.py:94 Exporting LDIF online... INFO  tests.suites.import.regression_test:regression_test.py:53 Run. INFO  tests.suites.import.regression_test:regression_test.py:104 Importing LDIF online, should raise operation error. INFO  tests.suites.import.regression_test:regression_test.py:115 Looping. Tried 1 times so far. INFO  tests.suites.import.regression_test:regression_test.py:57 Adding users. INFO  tests.suites.import.regression_test:regression_test.py:119 Importing LDIF online
Passed suites/import/regression_test.py::test_import_be_default 11.33
-------------------------------Captured log call--------------------------------
INFO  tests.suites.import.regression_test:regression_test.py:143 Adding suffix:dc=default,dc=com and backend: default... INFO  tests.suites.import.regression_test:regression_test.py:149 Create LDIF file and import it... INFO  tests.suites.import.regression_test:regression_test.py:154 Stopping the server and running offline import... INFO  tests.suites.import.regression_test:regression_test.py:160 Verifying entry count after import... INFO  tests.suites.import.regression_test:regression_test.py:166 Test PASSED
Passed suites/import/regression_test.py::test_del_suffix_import 6.93
-------------------------------Captured log call--------------------------------
INFO  tests.suites.import.regression_test:regression_test.py:183 Adding suffix:dc=importest1,dc=com and backend: importest1 INFO  tests.suites.import.regression_test:regression_test.py:188 Create LDIF file and import it INFO  tests.suites.import.regression_test:regression_test.py:194 Stopping the server and running offline import INFO  tests.suites.import.regression_test:regression_test.py:199 Deleting suffix-dc=importest2,dc=com INFO  tests.suites.import.regression_test:regression_test.py:202 Adding the same database-importest1 after deleting it
Passed suites/import/regression_test.py::test_del_suffix_backend 7.20
-------------------------------Captured log call--------------------------------
INFO  tests.suites.import.regression_test:regression_test.py:221 Adding suffix:dc=importest2,dc=com and backend: importest2 INFO  tests.suites.import.regression_test:regression_test.py:226 Create LDIF file and import it INFO  lib389:tasks.py:498 Import task import_10282020_213208 for file /var/lib/dirsrv/slapd-standalone1/ldif/suffix_del2.ldif completed successfully INFO  tests.suites.import.regression_test:regression_test.py:234 Deleting suffix-dc=importest2,dc=com INFO  tests.suites.import.regression_test:regression_test.py:237 Adding the same database-importest2 after deleting it INFO  tests.suites.import.regression_test:regression_test.py:240 Checking if server can be restarted after re-adding the same database
Passed suites/import/regression_test.py::test_import_duplicate_dn 14.93
-------------------------------Captured log call--------------------------------
INFO  tests.suites.import.regression_test:regression_test.py:266 Delete the previous error logs INFO  tests.suites.import.regression_test:regression_test.py:269 Create import file INFO  tests.suites.import.regression_test:regression_test.py:293 Import ldif with duplicate entry ERROR  lib389:tasks.py:495 Error: import task import_10282020_213218 for file /var/lib/dirsrv/slapd-standalone1/ldif/data.ldif exited with -23 INFO  tests.suites.import.regression_test:regression_test.py:296 Restart the server to flush the logs INFO  tests.suites.import.regression_test:regression_test.py:299 Error log should not have "unable to flush" message INFO  tests.suites.import.regression_test:regression_test.py:302 Error log should have "Duplicated DN detected" message
Passed suites/import/regression_test.py::test_large_ldif2db_ancestorid_index_creation 622.20
-------------------------------Captured log call--------------------------------
INFO  tests.suites.import.regression_test:regression_test.py:355 Delete the previous errors logs INFO  tests.suites.import.regression_test:regression_test.py:358 Add suffix:o=test and backend: test... INFO  tests.suites.import.regression_test:regression_test.py:371 Create a large nested ldif file using dbgen : /var/lib/dirsrv/slapd-standalone1/ldif/large_nested.ldif INFO  tests.suites.import.regression_test:regression_test.py:374 Stop the server and run offline import... INFO  tests.suites.import.regression_test:regression_test.py:379 Starting the server INFO  tests.suites.import.regression_test:regression_test.py:382 parse the errors logs to check lines with "Starting sort of ancestorid" are present INFO  tests.suites.import.regression_test:regression_test.py:386 parse the errors logs to check lines with "Finished sort of ancestorid" are present INFO  tests.suites.import.regression_test:regression_test.py:390 parse the error logs for the line with "Gathering ancestorid non-leaf IDs" INFO  tests.suites.import.regression_test:regression_test.py:394 parse the error logs for the line with "Created ancestorid index" INFO  tests.suites.import.regression_test:regression_test.py:398 get the ancestorid non-leaf IDs indexing start and end time from the collected strings INFO  tests.suites.import.regression_test:regression_test.py:404 Calculate the elapsed time for the ancestorid non-leaf IDs index creation
Passed suites/indexes/regression_test.py::test_reindex_task_creates_abandoned_index_file 13.10
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  lib389:tasks.py:798 Index task index_all_10282020_214305 completed successfully INFO  lib389:tasks.py:798 Index task index_all_10282020_214308 completed successfully INFO  lib389:tasks.py:798 Index task index_all_10282020_214315 completed successfully
Passed suites/lib389/config_compare_test.py::test_config_compare 0.01
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38902, 'ldap-secureport': 63602, 'server-id': 'standalone2', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/lib389/dsldapobject/dn_construct_test.py::test_mul_explicit_rdn 0.26
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/lib389/dsldapobject/dn_construct_test.py::test_mul_derive_single_dn 0.01
No log output captured.
Passed suites/lib389/dsldapobject/dn_construct_test.py::test_mul_derive_mult_dn 0.01
No log output captured.
Passed suites/lib389/dsldapobject/dn_construct_test.py::test_sin_explicit_dn 0.26
No log output captured.
Passed suites/lib389/dsldapobject/dn_construct_test.py::test_sin_explicit_rdn 0.01
No log output captured.
Passed suites/lib389/dsldapobject/dn_construct_test.py::test_sin_derive_single_dn 0.01
No log output captured.
Passed suites/lib389/dsldapobject/dn_construct_test.py::test_sin_derive_mult_dn 0.01
No log output captured.
Passed suites/lib389/dsldapobject/dn_construct_test.py::test_sin_invalid_no_basedn 0.00
No log output captured.
Passed suites/lib389/dsldapobject/dn_construct_test.py::test_sin_invalid_no_rdn 0.00
No log output captured.
Passed suites/lib389/dsldapobject/dn_construct_test.py::test_sin_non_present_rdn 0.02
No log output captured.
Passed suites/lib389/idm/user_compare_i2_test.py::test_user_compare_i2 0.31
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38902, 'ldap-secureport': 63602, 'server-id': 'standalone2', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/lib389/idm/user_compare_m2Repl_test.py::test_user_compare_m2Repl 1.06
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 108f00c3-fc90-4afb-a29b-74fd27ee2072 / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working 
(expect d0c0592b-1a08-4e96-8b81-8c5e990c68a7 / got description=108f00c3-fc90-4afb-a29b-74fd27ee2072) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists
-------------------------------Captured log call--------------------------------
INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect de9a3e22-0c39-43b7-a545-86374afb0252 / got description=d0c0592b-1a08-4e96-8b81-8c5e990c68a7) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working
Passed suites/lib389/idm/user_compare_st_test.py::test_user_compare 0.31
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/logging/logging_config_test.py::test_logging_digit_config[logexpirationtime-invalid_vals0-valid_vals0] 0.33
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/logging/logging_config_test.py::test_logging_digit_config[maxlogsize-invalid_vals1-valid_vals1] 0.09
No log output captured.
Passed suites/logging/logging_config_test.py::test_logging_digit_config[logmaxdiskspace-invalid_vals2-valid_vals2] 0.08
No log output captured.
Passed suites/logging/logging_config_test.py::test_logging_digit_config[logminfreediskspace-invalid_vals3-valid_vals3] 0.08
No log output captured.
Passed suites/logging/logging_config_test.py::test_logging_digit_config[mode-invalid_vals4-valid_vals4] 0.47
No log output captured.
Passed suites/logging/logging_config_test.py::test_logging_digit_config[maxlogsperdir-invalid_vals5-valid_vals5] 0.20
No log output captured.
Passed suites/logging/logging_config_test.py::test_logging_digit_config[logrotationsynchour-invalid_vals6-valid_vals6] 0.10
No log output captured.
Passed suites/logging/logging_config_test.py::test_logging_digit_config[logrotationsyncmin-invalid_vals7-valid_vals7] 0.08
No log output captured.
Passed suites/logging/logging_config_test.py::test_logging_digit_config[logrotationtime-invalid_vals8-valid_vals8] 0.10
No log output captured.
Passed suites/mapping_tree/acceptance_test.py::test_invalid_mt 0.02
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/mapping_tree/be_del_and_default_naming_attr_test.py::test_be_delete 1.47
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology.
Passed suites/mapping_tree/referral_during_tot_init_test.py::test_referral_during_tot 8.85
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 8eb8b1b7-e038-48d4-962e-5f33461e38be / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working 
(expect 62d5e98f-c115-4429-a25a-dcadbd747878 / got description=8eb8b1b7-e038-48d4-962e-5f33461e38be) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists
------------------------------Captured stderr call------------------------------
[28/Oct/2020:21:46:04.165116263 -0400] - INFO - slapd_exemode_ldif2db - Backend Instance: userRoot
Passed suites/memberof_plugin/regression_test.py::test_memberof_with_repl 76.69
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for hub1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39101, 'ldap-secureport': 63801, 'server-id': 'hub1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for consumer1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39201, 'ldap-secureport': 63901, 'server-id': 'consumer1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:524 Creating replication topology. INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39101 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39101 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39101 is NOT working (expect f539dcf3-73b2-461d-b9e2-cbe9726b9c15 / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39101 is working INFO  lib389.replica:replica.py:2211 SUCCESS: joined consumer from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to 
ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39101 INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39101 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is was created INFO  lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39101 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 80b7aad3-f251-42fe-a020-07c9bcf27a14 / got description=f539dcf3-73b2-461d-b9e2-cbe9726b9c15) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is working
-------------------------------Captured log call--------------------------------
INFO  tests.suites.memberof_plugin.regression_test:regression_test.py:77 update cn=101,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config to add nsDS5ReplicatedAttributeListTotal INFO  tests.suites.memberof_plugin.regression_test:regression_test.py:77 update cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config to add nsDS5ReplicatedAttributeListTotal
Passed suites/memberof_plugin/regression_test.py::test_scheme_violation_errors_logged 4.44
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0a33e037-eadc-4d4c-85fd-30e7815aaaeb / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working 
(expect f2ad49ba-8b1a-404d-bf06-39d0399a9d01 / got description=0a33e037-eadc-4d4c-85fd-30e7815aaaeb) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists
-------------------------------Captured log call--------------------------------
INFO  tests.suites.memberof_plugin.regression_test:regression_test.py:319 memberOf attr value - cn=group1,ou=groups,dc=example,dc=com INFO  tests.suites.memberof_plugin.regression_test:regression_test.py:322 pattern = .*oc_check_allowed_sv.*uid=user_,ou=people,dc=example,dc=com.*memberOf.*not allowed.*
Passed suites/memberof_plugin/regression_test.py::test_memberof_with_changelog_reset 49.12
-------------------------------Captured log call--------------------------------
INFO  tests.suites.memberof_plugin.regression_test:regression_test.py:354 Configure memberof on M1 and M2 INFO  tests.suites.memberof_plugin.regression_test:regression_test.py:365 On M1, add 999 test entries allowing memberof INFO  tests.suites.memberof_plugin.regression_test:regression_test.py:51 Adding 999 users INFO  tests.suites.memberof_plugin.regression_test:regression_test.py:368 On M1, add a group with these 999 entries as members INFO  tests.suites.memberof_plugin.regression_test:regression_test.py:376 Adding the test group using async function INFO  tests.suites.memberof_plugin.regression_test:regression_test.py:386 Check the log messages for error INFO  tests.suites.memberof_plugin.regression_test:regression_test.py:390 Check that the replication is working fine both ways, M1 <-> M2 INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect e1eca3a3-ab1c-40c7-8d0d-affcb8429e31 / got description=f2ad49ba-8b1a-404d-bf06-39d0399a9d01) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect d377c5d8-f66f-4ddc-bac0-5bdcf3bb47c2 / got description=e1eca3a3-ab1c-40c7-8d0d-affcb8429e31) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect d377c5d8-f66f-4ddc-bac0-5bdcf3bb47c2 / got description=e1eca3a3-ab1c-40c7-8d0d-affcb8429e31) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from 
ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working
Passed suites/memberof_plugin/regression_test.py::test_memberof_group 5.21
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.suites.memberof_plugin.regression_test:regression_test.py:481 Enable memberof plugin and set the scope as cn=sub1,dc=example,dc=com INFO  lib389:regression_test.py:440 !!!!!!! uid=test_m1,cn=sub1,dc=example,dc=com: memberof->b'cn=g1,cn=sub1,dc=example,dc=com' INFO  lib389:regression_test.py:440 !!!!!!! uid=test_m2,cn=sub1,dc=example,dc=com: memberof->b'cn=g1,cn=sub1,dc=example,dc=com' INFO  lib389:regression_test.py:440 !!!!!!! uid=test_m1,cn=sub1,dc=example,dc=com: memberof->b'cn=g1,cn=sub1,dc=example,dc=com' INFO  lib389:regression_test.py:440 !!!!!!! uid=test_m2,cn=sub1,dc=example,dc=com: memberof->b'cn=g1,cn=sub1,dc=example,dc=com' CRITICAL tests.suites.memberof_plugin.regression_test:regression_test.py:427 Renaming user (cn=g2,cn=sub2,dc=example,dc=com): new cn=g2-new INFO  lib389:regression_test.py:440 !!!!!!! uid=test_m1,cn=sub1,dc=example,dc=com: memberof->b'cn=g1,cn=sub1,dc=example,dc=com' INFO  lib389:regression_test.py:440 !!!!!!! uid=test_m2,cn=sub1,dc=example,dc=com: memberof->b'cn=g1,cn=sub1,dc=example,dc=com' INFO  lib389:regression_test.py:440 !!!!!!! uid=test_m1,cn=sub1,dc=example,dc=com: memberof->b'cn=g1,cn=sub1,dc=example,dc=com' INFO  lib389:regression_test.py:440 !!!!!!! uid=test_m1,cn=sub1,dc=example,dc=com: memberof->b'cn=g2-new,cn=sub1,dc=example,dc=com' INFO  lib389:regression_test.py:440 !!!!!!! uid=test_m2,cn=sub1,dc=example,dc=com: memberof->b'cn=g1,cn=sub1,dc=example,dc=com' INFO  lib389:regression_test.py:440 !!!!!!! uid=test_m2,cn=sub1,dc=example,dc=com: memberof->b'cn=g2-new,cn=sub1,dc=example,dc=com'
Passed suites/memberof_plugin/regression_test.py::test_entrycache_on_modrdn_failure 9.40
-------------------------------Captured log call--------------------------------
CRITICAL tests.suites.memberof_plugin.regression_test:regression_test.py:575 Adding user (cn=user0,ou=people,dc=example,dc=com): CRITICAL tests.suites.memberof_plugin.regression_test:regression_test.py:575 Adding user (cn=user1,ou=people,dc=example,dc=com): CRITICAL tests.suites.memberof_plugin.regression_test:regression_test.py:575 Adding user (cn=user2,ou=people,dc=example,dc=com): CRITICAL tests.suites.memberof_plugin.regression_test:regression_test.py:575 Adding user (cn=user3,ou=people,dc=example,dc=com): CRITICAL tests.suites.memberof_plugin.regression_test:regression_test.py:575 Adding user (cn=user4,ou=people,dc=example,dc=com): CRITICAL tests.suites.memberof_plugin.regression_test:regression_test.py:575 Adding user (cn=user5,ou=people,dc=example,dc=com): CRITICAL tests.suites.memberof_plugin.regression_test:regression_test.py:575 Adding user (cn=user6,ou=people,dc=example,dc=com): CRITICAL tests.suites.memberof_plugin.regression_test:regression_test.py:575 Adding user (cn=user7,ou=people,dc=example,dc=com): CRITICAL tests.suites.memberof_plugin.regression_test:regression_test.py:575 Adding user (cn=user8,ou=people,dc=example,dc=com): CRITICAL tests.suites.memberof_plugin.regression_test:regression_test.py:575 Adding user (cn=user9,ou=people,dc=example,dc=com): INFO  lib389:regression_test.py:596 !!!!!!! cn=user0,ou=people,dc=example,dc=com: memberof->b'cn=group_in0,ou=people,dc=example,dc=com' (vs b'cn=group_in0,ou=people,dc=example,dc=com') INFO  lib389:regression_test.py:596 !!!!!!! cn=user1,ou=people,dc=example,dc=com: memberof->b'cn=group_in0,ou=people,dc=example,dc=com' (vs b'cn=group_in0,ou=people,dc=example,dc=com') INFO  lib389:regression_test.py:618 !!!!!!! cn=user0,ou=people,dc=example,dc=com: memberof->b'cn=group_in0,ou=people,dc=example,dc=com' (vs b'cn=group_out1,dc=example,dc=com') INFO  lib389:regression_test.py:618 !!!!!!! 
cn=user1,ou=people,dc=example,dc=com: memberof->b'cn=group_in0,ou=people,dc=example,dc=com' (vs b'cn=group_out1,dc=example,dc=com') INFO  lib389:regression_test.py:633 !!!!!!! cn=user0,ou=people,dc=example,dc=com: memberof->b'cn=group_in0,ou=people,dc=example,dc=com' (vs b'cn=group_in1,ou=people,dc=example,dc=com') INFO  lib389:regression_test.py:633 !!!!!!! cn=user0,ou=people,dc=example,dc=com: memberof->b'cn=group_in1,ou=people,dc=example,dc=com' (vs b'cn=group_in1,ou=people,dc=example,dc=com') INFO  lib389:regression_test.py:633 !!!!!!! cn=user1,ou=people,dc=example,dc=com: memberof->b'cn=group_in0,ou=people,dc=example,dc=com' (vs b'cn=group_in1,ou=people,dc=example,dc=com') INFO  lib389:regression_test.py:633 !!!!!!! cn=user1,ou=people,dc=example,dc=com: memberof->b'cn=group_in1,ou=people,dc=example,dc=com' (vs b'cn=group_in1,ou=people,dc=example,dc=com') INFO  lib389:regression_test.py:672 retrieve: cn=group_admin,ou=permissions,dc=example,dc=com with desc=None INFO  lib389:regression_test.py:672 retrieve: cn=group_modify,ou=permissions,dc=example,dc=com with desc=None INFO  lib389:regression_test.py:672 retrieve: cn=group_in0,ou=people,dc=example,dc=com with desc=b'mygroup' INFO  lib389:regression_test.py:672 retrieve: cn=group_in1,ou=people,dc=example,dc=com with desc=b'mygroup' INFO  lib389:regression_test.py:672 retrieve: cn=group_out2,dc=example,dc=com with desc=b'this is to check that the entry having this description has the appropriate DN'
Passed suites/memberof_plugin/regression_test.py::test_silent_memberof_failure 9.87
-------------------------------Captured log call--------------------------------
CRITICAL tests.suites.memberof_plugin.regression_test:regression_test.py:738 Adding user (cn=user0,ou=people,dc=example,dc=com): CRITICAL tests.suites.memberof_plugin.regression_test:regression_test.py:738 Adding user (cn=user1,ou=people,dc=example,dc=com): CRITICAL tests.suites.memberof_plugin.regression_test:regression_test.py:738 Adding user (cn=user2,ou=people,dc=example,dc=com): CRITICAL tests.suites.memberof_plugin.regression_test:regression_test.py:738 Adding user (cn=user3,ou=people,dc=example,dc=com): CRITICAL tests.suites.memberof_plugin.regression_test:regression_test.py:738 Adding user (cn=user4,ou=people,dc=example,dc=com): CRITICAL tests.suites.memberof_plugin.regression_test:regression_test.py:738 Adding user (cn=user5,ou=people,dc=example,dc=com): CRITICAL tests.suites.memberof_plugin.regression_test:regression_test.py:738 Adding user (cn=user6,ou=people,dc=example,dc=com): CRITICAL tests.suites.memberof_plugin.regression_test:regression_test.py:738 Adding user (cn=user7,ou=people,dc=example,dc=com): CRITICAL tests.suites.memberof_plugin.regression_test:regression_test.py:738 Adding user (cn=user8,ou=people,dc=example,dc=com): CRITICAL tests.suites.memberof_plugin.regression_test:regression_test.py:738 Adding user (cn=user9,ou=people,dc=example,dc=com): INFO  lib389:regression_test.py:759 !!!!!!! cn=user0,ou=people,dc=example,dc=com: memberof->b'cn=group_in0,ou=people,dc=example,dc=com' (vs b'cn=group_in0,ou=people,dc=example,dc=com') INFO  lib389:regression_test.py:759 !!!!!!! cn=user1,ou=people,dc=example,dc=com: memberof->b'cn=group_in0,ou=people,dc=example,dc=com' (vs b'cn=group_in0,ou=people,dc=example,dc=com') INFO  lib389:regression_test.py:781 !!!!!!! cn=user0,ou=people,dc=example,dc=com: memberof->b'cn=group_in0,ou=people,dc=example,dc=com' (vs b'cn=group_out1,dc=example,dc=com') INFO  lib389:regression_test.py:781 !!!!!!! 
cn=user1,ou=people,dc=example,dc=com: memberof->b'cn=group_in0,ou=people,dc=example,dc=com' (vs b'cn=group_out1,dc=example,dc=com') INFO  lib389:regression_test.py:796 !!!!!!! cn=user0,ou=people,dc=example,dc=com: memberof->b'cn=group_in0,ou=people,dc=example,dc=com' (vs b'cn=group_in1,ou=people,dc=example,dc=com') INFO  lib389:regression_test.py:796 !!!!!!! cn=user0,ou=people,dc=example,dc=com: memberof->b'cn=group_in1,ou=people,dc=example,dc=com' (vs b'cn=group_in1,ou=people,dc=example,dc=com') INFO  lib389:regression_test.py:796 !!!!!!! cn=user1,ou=people,dc=example,dc=com: memberof->b'cn=group_in0,ou=people,dc=example,dc=com' (vs b'cn=group_in1,ou=people,dc=example,dc=com') INFO  lib389:regression_test.py:796 !!!!!!! cn=user1,ou=people,dc=example,dc=com: memberof->b'cn=group_in1,ou=people,dc=example,dc=com' (vs b'cn=group_in1,ou=people,dc=example,dc=com') INFO  lib389:regression_test.py:833 Should assert cn=user2,ou=people,dc=example,dc=com has memberof is False INFO  lib389:regression_test.py:833 Should assert cn=user3,ou=people,dc=example,dc=com has memberof is False INFO  lib389:regression_test.py:856 Should assert cn=user4,ou=people,dc=example,dc=com has memberof is False INFO  lib389:regression_test.py:856 Should assert cn=user5,ou=people,dc=example,dc=com has memberof is False
Passed suites/monitor/monitor_test.py::test_monitor 0.51
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.suites.monitor.monitor_test:monitor_test.py:44 connection: ['1:20201029015038Z:3:2:-:cn=directory manager:0:0:0:1:ip=127.0.0.1'], currentconnections: ['1'], totalconnections: ['1'] INFO  tests.suites.monitor.monitor_test:monitor_test.py:48 version :: ['1:20201029015038Z:6:5:-:cn=directory manager:0:0:0:1:ip=127.0.0.1'] INFO  tests.suites.monitor.monitor_test:monitor_test.py:52 threads: ['16'],currentconnectionsatmaxthreads: ['0'],maxthreadsperconnhits: ['0'] INFO  tests.suites.monitor.monitor_test:monitor_test.py:56 nbackends: ['1'], backendmonitordn: ['cn=monitor,cn=userRoot,cn=ldbm database,cn=plugins,cn=config'] INFO  tests.suites.monitor.monitor_test:monitor_test.py:60 opsinitiated: ['12'], opscompleted: ['13'] INFO  tests.suites.monitor.monitor_test:monitor_test.py:64 dtablesize: ['1024'],readwaiters: ['0'],entriessent: ['14'],bytessent: ['1099'],currenttime: ['20201029015038Z'],starttime: ['20201029015038Z']
Passed suites/monitor/monitor_test.py::test_monitor_ldbm 0.02
No log output captured.
Passed suites/monitor/monitor_test.py::test_monitor_backend 0.01
No log output captured.
Passed suites/openldap_2_389/migrate_test.py::test_parse_openldap_slapdd 0.02
-------------------------------Captured log call--------------------------------
INFO  lib389.migrate.openldap.config:config.py:264 Examining OpenLDAP Configuration ... INFO  lib389.migrate.openldap.config:config.py:285 Completed OpenLDAP Configuration Parsing.
Passed suites/openldap_2_389/migrate_test.py::test_migrate_openldap_slapdd 23.94
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
------------------------------Captured stdout call------------------------------
==== migration plan ==== SchemaAttributeCreate -> ('pseudonym',) SchemaAttributeCreate -> ('email', 'emailAddress', 'pkcs9email') SchemaAttributeCreate -> ('textEncodedORAddress',) SchemaAttributeUnsupported -> ('otherMailbox',) SchemaAttributeCreate -> ('aRecord',) SchemaAttributeCreate -> ('mDRecord',) SchemaAttributeCreate -> ('mXRecord',) SchemaAttributeCreate -> ('nSRecord',) SchemaAttributeCreate -> ('sOARecord',) SchemaAttributeCreate -> ('cNAMERecord',) SchemaAttributeCreate -> ('janetMailbox',) SchemaAttributeCreate -> ('mailPreferenceOption',) SchemaAttributeUnsupported -> ('dSAQuality',) SchemaAttributeUnsupported -> ('singleLevelQuality',) SchemaAttributeUnsupported -> ('subtreeMinimumQuality',) SchemaAttributeUnsupported -> ('subtreeMaximumQuality',) SchemaAttributeCreate -> ('personalSignature',) SchemaAttributeCreate -> ('suseDefaultBase',) SchemaAttributeCreate -> ('suseNextUniqueId',) SchemaAttributeCreate -> ('suseMinUniqueId',) SchemaAttributeCreate -> ('suseMaxUniqueId',) SchemaAttributeCreate -> ('suseDefaultTemplate',) SchemaAttributeCreate -> ('suseSearchFilter',) SchemaAttributeCreate -> ('suseDefaultValue',) SchemaAttributeCreate -> ('suseNamingAttribute',) SchemaAttributeCreate -> ('suseSecondaryGroup',) SchemaAttributeCreate -> ('suseMinPasswordLength',) SchemaAttributeCreate -> ('suseMaxPasswordLength',) SchemaAttributeCreate -> ('susePasswordHash',) SchemaAttributeCreate -> ('suseSkelDir',) SchemaAttributeCreate -> ('susePlugin',) SchemaAttributeCreate -> ('suseMapAttribute',) SchemaAttributeCreate -> ('suseImapServer',) SchemaAttributeCreate -> ('suseImapAdmin',) SchemaAttributeCreate -> ('suseImapDefaultQuota',) SchemaAttributeCreate -> ('suseImapUseSsl',) SchemaClassUnsupported -> 0.9.2342.19200300.100.4.4 ('pilotPerson', 'newPilotPerson') may -> ('userid', 'textEncodedORAddress', 'rfc822Mailbox', 'favouriteDrink', 'roomNumber', 'userClass', 'homeTelephoneNumber', 'homePostalAddress', 'secretary', 'personalTitle', 
'preferredDeliveryMethod', 'businessCategory', 'janetMailbox', 'otherMailbox', 'mobileTelephoneNumber', 'pagerTelephoneNumber', 'organizationalStatus', 'mailPreferenceOption', 'personalSignature') must -> () sup -> ('person',) SchemaClassCreate -> 0.9.2342.19200300.100.4.15 ('dNSDomain',) may -> ('ARecord', 'MDRecord', 'MXRecord', 'NSRecord', 'SOARecord', 'CNAMERecord') must -> () sup -> ('domain',) SchemaClassCreate -> 0.9.2342.19200300.100.4.20 ('pilotOrganization',) may -> ('buildingName',) must -> () sup -> ('organization', 'organizationalUnit') SchemaClassUnsupported -> 0.9.2342.19200300.100.4.21 ('pilotDSA',) may -> ('dSAQuality',) must -> () sup -> ('dsa',) SchemaClassUnsupported -> 0.9.2342.19200300.100.4.22 ('qualityLabelledData',) may -> ('subtreeMinimumQuality', 'subtreeMaximumQuality') must -> ('dsaQuality',) sup -> ('top',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:2 ('suseModuleConfiguration',) may -> ('suseDefaultBase',) must -> ('cn',) sup -> ('top',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:3 ('suseUserConfiguration',) may -> ('suseMinPasswordLength', 'suseMaxPasswordLength', 'susePasswordHash', 'suseSkelDir', 'suseNextUniqueId', 'suseMinUniqueId', 'suseMaxUniqueId', 'suseDefaultTemplate', 'suseSearchFilter', 'suseMapAttribute') must -> () sup -> ('suseModuleConfiguration',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:4 ('suseObjectTemplate',) may -> ('susePlugin', 'suseDefaultValue', 'suseNamingAttribute') must -> ('cn',) sup -> ('top',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:5 ('suseUserTemplate',) may -> ('suseSecondaryGroup',) must -> ('cn',) sup -> ('suseObjectTemplate',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:6 ('suseGroupTemplate',) may -> () must -> ('cn',) sup -> ('suseObjectTemplate',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:7 ('suseGroupConfiguration',) may -> ('suseNextUniqueId', 'suseMinUniqueId', 'suseMaxUniqueId', 'suseDefaultTemplate', 'suseSearchFilter', 'suseMapAttribute') must -> () sup -> 
('suseModuleConfiguration',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:8 ('suseCaConfiguration',) may -> () must -> () sup -> ('suseModuleConfiguration',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:9 ('suseDnsConfiguration',) may -> () must -> () sup -> ('suseModuleConfiguration',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:10 ('suseDhcpConfiguration',) may -> () must -> () sup -> ('suseModuleConfiguration',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:11 ('suseMailConfiguration',) may -> () must -> ('suseImapServer', 'suseImapAdmin', 'suseImapDefaultQuota', 'suseImapUseSsl') sup -> ('suseModuleConfiguration',) DatabaseReindex -> dc=example,dc=com PluginMemberOfEnable PluginMemberOfScope -> dc=example,dc=com PluginMemberOfFixup -> dc=example,dc=com PluginRefintEnable PluginRefintAttributes -> member PluginRefintAttributes -> memberOf PluginRefintScope -> dc=example,dc=com PluginUniqueConfigure -> dc=example,dc=com, mail 401a528e-eaf5-1039-8667-dbfbf2f5e6dd PluginUniqueConfigure -> dc=example,dc=com, uid 401a528e-eaf5-1039-8667-dbfbf2f5e6dd DatabaseCreate -> dc=example,dc=net, 401a7084-eaf5-1039-866c-dbfbf2f5e6dd DatabaseIndexCreate -> objectClass eq, dc=example,dc=net DatabaseReindex -> dc=example,dc=net PluginMemberOfEnable PluginMemberOfScope -> dc=example,dc=net PluginMemberOfFixup -> dc=example,dc=net PluginUniqueConfigure -> dc=example,dc=net, mail 401a7084-eaf5-1039-866c-dbfbf2f5e6dd PluginUniqueConfigure -> dc=example,dc=net, uid 401a7084-eaf5-1039-866c-dbfbf2f5e6dd DatabaseLdifImport -> dc=example,dc=com /export/tests/suites/openldap_2_389/../../data/openldap_2_389/1/example_com.slapcat.ldif DatabaseLdifImport -> dc=example,dc=net /export/tests/suites/openldap_2_389/../../data/openldap_2_389/1/example_net.slapcat.ldif ==== end migration plan ====
-------------------------------Captured log call--------------------------------
INFO  lib389.migrate.openldap.config:config.py:264 Examining OpenLDAP Configuration ... INFO  lib389.migrate.openldap.config:config.py:285 Completed OpenLDAP Configuration Parsing. INFO  lib389.migrate.plan:plan.py:656 migration: 1 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 2 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 3 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 4 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 5 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 6 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 7 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 8 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 9 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 10 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 11 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 12 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 13 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 14 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 15 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 16 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 17 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 18 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 19 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 20 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 21 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 22 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 23 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 24 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 25 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 26 / 71 complete ... 
INFO  lib389.migrate.plan:plan.py:656 migration: 27 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 28 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 29 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 30 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 31 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 32 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 33 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 34 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 35 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 36 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 37 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 38 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 39 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 40 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 41 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 42 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 43 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 44 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 45 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 46 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 47 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 48 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 49 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 50 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 51 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 52 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 53 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 54 / 71 complete ... 
INFO  lib389.migrate.plan:plan.py:656 migration: 55 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 56 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 57 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 58 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 59 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 60 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 61 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 62 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 63 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 64 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 65 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 66 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 67 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 68 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 69 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 70 / 71 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 71 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 1 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 2 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 3 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 4 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 5 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 6 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 7 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 8 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 9 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 10 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 11 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 12 / 71 complete ... 
INFO  lib389.migrate.plan:plan.py:663 post: 13 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 14 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 15 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 16 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 17 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 18 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 19 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 20 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 21 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 22 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 23 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 24 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 25 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 26 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 27 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 28 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 29 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 30 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 31 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 32 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 33 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 34 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 35 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 36 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 37 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 38 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 39 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 40 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 41 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 42 / 71 complete ... 
INFO  lib389.migrate.plan:plan.py:663 post: 43 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 44 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 45 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 46 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 47 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 48 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 49 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 50 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 51 / 71 complete ... ERROR  lib389:tasks.py:795 Error: index task index_all_10282020_215109 exited with -1 INFO  lib389.migrate.plan:plan.py:663 post: 52 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 53 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 54 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 55 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 56 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 57 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 58 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 59 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 60 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 61 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 62 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 63 / 71 complete ... ERROR  lib389:tasks.py:795 Error: index task index_all_10282020_215111 exited with -1 INFO  lib389.migrate.plan:plan.py:663 post: 64 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 65 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 66 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 67 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 68 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 69 / 71 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 70 / 71 complete ... 
INFO  lib389.migrate.plan:plan.py:663 post: 71 / 71 complete ...
Passed suites/openldap_2_389/migrate_test.py::test_migrate_openldap_slapdd_skip_elements 14.38
------------------------------Captured stdout call------------------------------
==== migration plan ==== SchemaAttributeUnsupported -> ('otherMailbox',) SchemaAttributeUnsupported -> ('dSAQuality',) SchemaAttributeUnsupported -> ('singleLevelQuality',) SchemaAttributeUnsupported -> ('subtreeMinimumQuality',) SchemaAttributeUnsupported -> ('subtreeMaximumQuality',) SchemaAttributeCreate -> ('suseDefaultBase',) SchemaAttributeCreate -> ('suseNextUniqueId',) SchemaAttributeCreate -> ('suseMinUniqueId',) SchemaAttributeCreate -> ('suseMaxUniqueId',) SchemaAttributeCreate -> ('suseDefaultTemplate',) SchemaAttributeCreate -> ('suseSearchFilter',) SchemaAttributeCreate -> ('suseDefaultValue',) SchemaAttributeCreate -> ('suseNamingAttribute',) SchemaAttributeCreate -> ('suseSecondaryGroup',) SchemaAttributeCreate -> ('suseMinPasswordLength',) SchemaAttributeCreate -> ('suseMaxPasswordLength',) SchemaAttributeCreate -> ('susePasswordHash',) SchemaAttributeCreate -> ('suseSkelDir',) SchemaAttributeCreate -> ('susePlugin',) SchemaAttributeCreate -> ('suseMapAttribute',) SchemaAttributeCreate -> ('suseImapServer',) SchemaAttributeCreate -> ('suseImapAdmin',) SchemaAttributeCreate -> ('suseImapDefaultQuota',) SchemaAttributeCreate -> ('suseImapUseSsl',) SchemaClassUnsupported -> 0.9.2342.19200300.100.4.4 ('pilotPerson', 'newPilotPerson') may -> ('userid', 'textEncodedORAddress', 'rfc822Mailbox', 'favouriteDrink', 'roomNumber', 'userClass', 'homeTelephoneNumber', 'homePostalAddress', 'secretary', 'personalTitle', 'preferredDeliveryMethod', 'businessCategory', 'janetMailbox', 'otherMailbox', 'mobileTelephoneNumber', 'pagerTelephoneNumber', 'organizationalStatus', 'mailPreferenceOption', 'personalSignature') must -> () sup -> ('person',) SchemaClassInconsistent -> ( 0.9.2342.19200300.100.4.20 NAME 'pilotOrganization' SUP organization STRUCTURAL MAY buildingName X-ORIGIN 'user defined' ) to 0.9.2342.19200300.100.4.20 ('pilotOrganization',) may -> ('buildingName',) must -> () sup -> ('organization', 'organizationalUnit') SchemaClassUnsupported -> 
0.9.2342.19200300.100.4.21 ('pilotDSA',) may -> ('dSAQuality',) must -> () sup -> ('dsa',) SchemaClassUnsupported -> 0.9.2342.19200300.100.4.22 ('qualityLabelledData',) may -> ('subtreeMinimumQuality', 'subtreeMaximumQuality') must -> ('dsaQuality',) sup -> ('top',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:2 ('suseModuleConfiguration',) may -> ('suseDefaultBase',) must -> ('cn',) sup -> ('top',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:3 ('suseUserConfiguration',) may -> ('suseMinPasswordLength', 'suseMaxPasswordLength', 'susePasswordHash', 'suseSkelDir', 'suseNextUniqueId', 'suseMinUniqueId', 'suseMaxUniqueId', 'suseDefaultTemplate', 'suseSearchFilter', 'suseMapAttribute') must -> () sup -> ('suseModuleConfiguration',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:4 ('suseObjectTemplate',) may -> ('susePlugin', 'suseDefaultValue', 'suseNamingAttribute') must -> ('cn',) sup -> ('top',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:5 ('suseUserTemplate',) may -> ('suseSecondaryGroup',) must -> ('cn',) sup -> ('suseObjectTemplate',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:6 ('suseGroupTemplate',) may -> () must -> ('cn',) sup -> ('suseObjectTemplate',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:7 ('suseGroupConfiguration',) may -> ('suseNextUniqueId', 'suseMinUniqueId', 'suseMaxUniqueId', 'suseDefaultTemplate', 'suseSearchFilter', 'suseMapAttribute') must -> () sup -> ('suseModuleConfiguration',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:8 ('suseCaConfiguration',) may -> () must -> () sup -> ('suseModuleConfiguration',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:9 ('suseDnsConfiguration',) may -> () must -> () sup -> ('suseModuleConfiguration',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:10 ('suseDhcpConfiguration',) may -> () must -> () sup -> ('suseModuleConfiguration',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:11 ('suseMailConfiguration',) may -> () must -> ('suseImapServer', 'suseImapAdmin', 'suseImapDefaultQuota', 
'suseImapUseSsl') sup -> ('suseModuleConfiguration',) DatabaseReindex -> dc=example,dc=com PluginMemberOfEnable PluginMemberOfScope -> dc=example,dc=com PluginMemberOfFixup -> dc=example,dc=com PluginRefintEnable PluginRefintAttributes -> member PluginRefintAttributes -> memberOf PluginRefintScope -> dc=example,dc=com PluginUniqueConfigure -> dc=example,dc=com, mail 401a528e-eaf5-1039-8667-dbfbf2f5e6dd PluginUniqueConfigure -> dc=example,dc=com, uid 401a528e-eaf5-1039-8667-dbfbf2f5e6dd DatabaseReindex -> dc=example,dc=net PluginMemberOfEnable PluginMemberOfScope -> dc=example,dc=net PluginMemberOfFixup -> dc=example,dc=net PluginUniqueConfigure -> dc=example,dc=net, mail 401a7084-eaf5-1039-866c-dbfbf2f5e6dd PluginUniqueConfigure -> dc=example,dc=net, uid 401a7084-eaf5-1039-866c-dbfbf2f5e6dd DatabaseLdifImport -> dc=example,dc=com /export/tests/suites/openldap_2_389/../../data/openldap_2_389/1/example_com.slapcat.ldif ==== end migration plan ====
-------------------------------Captured log call--------------------------------
INFO  lib389.migrate.openldap.config:config.py:264 Examining OpenLDAP Configuration ... INFO  lib389.migrate.openldap.config:config.py:285 Completed OpenLDAP Configuration Parsing. INFO  lib389.migrate.plan:plan.py:656 migration: 1 / 55 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 2 / 55 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 3 / 55 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 4 / 55 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 5 / 55 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 6 / 55 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 7 / 55 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 8 / 55 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 9 / 55 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 10 / 55 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 11 / 55 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 12 / 55 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 13 / 55 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 14 / 55 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 15 / 55 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 16 / 55 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 17 / 55 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 18 / 55 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 19 / 55 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 20 / 55 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 21 / 55 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 22 / 55 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 23 / 55 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 24 / 55 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 25 / 55 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 26 / 55 complete ... 
INFO  lib389.migrate.plan:plan.py:656 migration: 27 / 55 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 28 / 55 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 29 / 55 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 30 / 55 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 31 / 55 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 32 / 55 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 33 / 55 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 34 / 55 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 35 / 55 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 36 / 55 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 37 / 55 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 38 / 55 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 39 / 55 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 40 / 55 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 41 / 55 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 42 / 55 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 43 / 55 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 44 / 55 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 45 / 55 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 46 / 55 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 47 / 55 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 48 / 55 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 49 / 55 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 50 / 55 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 51 / 55 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 52 / 55 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 53 / 55 complete ... INFO  lib389.migrate.plan:plan.py:656 migration: 54 / 55 complete ... 
INFO  lib389.migrate.plan:plan.py:656 migration: 55 / 55 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 1 / 55 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 2 / 55 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 3 / 55 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 4 / 55 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 5 / 55 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 6 / 55 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 7 / 55 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 8 / 55 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 9 / 55 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 10 / 55 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 11 / 55 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 12 / 55 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 13 / 55 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 14 / 55 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 15 / 55 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 16 / 55 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 17 / 55 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 18 / 55 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 19 / 55 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 20 / 55 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 21 / 55 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 22 / 55 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 23 / 55 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 24 / 55 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 25 / 55 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 26 / 55 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 27 / 55 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 28 / 55 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 29 / 55 complete ... 
INFO  lib389.migrate.plan:plan.py:663 post: 30 / 55 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 31 / 55 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 32 / 55 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 33 / 55 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 34 / 55 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 35 / 55 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 36 / 55 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 37 / 55 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 38 / 55 complete ... ERROR  lib389:tasks.py:795 Error: index task index_all_10282020_215124 exited with -1 INFO  lib389.migrate.plan:plan.py:663 post: 39 / 55 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 40 / 55 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 41 / 55 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 42 / 55 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 43 / 55 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 44 / 55 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 45 / 55 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 46 / 55 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 47 / 55 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 48 / 55 complete ... ERROR  lib389:tasks.py:795 Error: index task index_all_10282020_215126 exited with -1 INFO  lib389.migrate.plan:plan.py:663 post: 49 / 55 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 50 / 55 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 51 / 55 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 52 / 55 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 53 / 55 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 54 / 55 complete ... INFO  lib389.migrate.plan:plan.py:663 post: 55 / 55 complete ...
Passed suites/paged_results/paged_results_test.py::test_search_success[6-5] 0.59
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:56 Adding user simplepaged_test
-------------------------------Captured log call--------------------------------
INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 5 users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:247 Set user bind simplepaged_test INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 6; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7fd165fb74c0>]. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:253 5 results INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 5 users
Passed suites/paged_results/paged_results_test.py::test_search_success[5-5] 0.24
-------------------------------Captured log call--------------------------------
INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 5 users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:247 Set user bind simplepaged_test INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 5; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7fd1764f8880>]. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:253 5 results INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 5 users
Passed suites/paged_results/paged_results_test.py::test_search_success[5-25] 1.01
-------------------------------Captured log call--------------------------------
INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 25 users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:247 Set user bind simplepaged_test INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 5; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7fd1756b7d00>]. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 1 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 2 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 3 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 4 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:253 25 results INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 25 users
Passed suites/paged_results/paged_results_test.py::test_search_limits_fail[50-200-cn=config,cn=ldbm database,cn=plugins,cn=config-nsslapd-idlistscanlimit-100-UNWILLING_TO_PERFORM] 8.11
-------------------------------Captured log call--------------------------------
INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 200 users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-idlistscanlimit to 100. Previous value - b'4000'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:299 Set user bind INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:302 Create simple paged results control instance INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:308 Initiate ldapsearch with created control instance INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:320 Getting page 0 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 200 users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-idlistscanlimit to b'4000'. Previous value - b'100'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config.
Passed suites/paged_results/paged_results_test.py::test_search_limits_fail[5-15-cn=config-nsslapd-timelimit-20-UNAVAILABLE_CRITICAL_EXTENSION] 30.74
-------------------------------Captured log call--------------------------------
INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 15 users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-timelimit to 20. Previous value - b'3600'. Modified suffix - cn=config. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:299 Set user bind INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:302 Create simple paged results control instance INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:308 Initiate ldapsearch with created control instance INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:320 Getting page 0 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 15 users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-timelimit to b'3600'. Previous value - b'20'. Modified suffix - cn=config.
Passed suites/paged_results/paged_results_test.py::test_search_limits_fail[21-50-cn=config-nsslapd-sizelimit-20-SIZELIMIT_EXCEEDED] 3.29
-------------------------------Captured log call--------------------------------
INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 50 users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-sizelimit to 20. Previous value - b'2000'. Modified suffix - cn=config. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:299 Set user bind INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:302 Create simple paged results control instance INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:308 Initiate ldapsearch with created control instance INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:320 Getting page 0 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 50 users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-sizelimit to b'2000'. Previous value - b'20'. Modified suffix - cn=config.
Passed suites/paged_results/paged_results_test.py::test_search_limits_fail[21-50-cn=config-nsslapd-pagedsizelimit-5-SIZELIMIT_EXCEEDED] 2.33
-------------------------------Captured log call--------------------------------
INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 50 users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-pagedsizelimit to 5. Previous value - b'0'. Modified suffix - cn=config. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:299 Set user bind INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:302 Create simple paged results control instance INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:308 Initiate ldapsearch with created control instance INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:320 Getting page 0 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 50 users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-pagedsizelimit to b'0'. Previous value - b'5'. Modified suffix - cn=config.
Passed suites/paged_results/paged_results_test.py::test_search_limits_fail[5-50-cn=config,cn=ldbm database,cn=plugins,cn=config-nsslapd-lookthroughlimit-20-ADMINLIMIT_EXCEEDED] 1.98
-------------------------------Captured log call--------------------------------
INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 50 users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-lookthroughlimit to 20. Previous value - b'5000'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:299 Set user bind INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:302 Create simple paged results control instance INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:308 Initiate ldapsearch with created control instance INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:320 Getting page 0 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 50 users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-lookthroughlimit to b'5000'. Previous value - b'20'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config.
Passed suites/paged_results/paged_results_test.py::test_search_sort_success 2.03
-------------------------------Captured log call--------------------------------
INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 50 users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:378 Initiate ldapsearch with created control instance INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:379 Collect data with sorting INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 5; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7fd17440a880>, <lib389._controls.SSSRequestControl object at 0x7fd17440a400>]. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 1 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 2 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 3 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 4 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 5 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 6 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 7 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 8 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 9 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:384 Substring numbers from user DNs INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:388 Assert that list is sorted INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 50 users
Passed suites/paged_results/paged_results_test.py::test_search_abandon 5.45
-------------------------------Captured log call--------------------------------
INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 10 users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:419 Set user bind INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:422 Create simple paged results control instance INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:426 Initiate a search with a paged results control INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:429 Abandon the search INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:432 Expect an ldap.TIMEOUT exception, while trying to get the search results INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 10 users
Passed suites/paged_results/paged_results_test.py::test_search_with_timelimit 34.75
-------------------------------Captured log call--------------------------------
INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 100 users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:468 Set user bind INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:471 Create simple paged results control instance INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:476 Iteration 0 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:483 Getting page 0 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:483 Getting page 1 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:499 Done with this search - sleeping 10 seconds INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:476 Iteration 1 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:483 Getting page 0 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:483 Getting page 1 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:499 Done with this search - sleeping 10 seconds INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:476 Iteration 2 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:483 Getting page 0 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:483 Getting page 1 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:499 Done with this search - sleeping 10 seconds INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 100 users
Passed suites/paged_results/paged_results_test.py::test_search_dns_ip_aci[fqdn] 4.27
-------------------------------Captured log call--------------------------------
INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 100 users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:547 Back up current suffix ACI INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:550 Add test ACI INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:556 Set user bind INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:559 Create simple paged results control instance INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:563 Initiate three searches with a paged results control INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:565 1 search INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 5; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7fd1647dc220>]. 
INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 1 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 2 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 3 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 4 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 5 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 6 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 7 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 8 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 9 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 10 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 11 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 12 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 13 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 14 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 15 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 16 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 17 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 18 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 19 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:568 100 results INFO  
tests.suites.paged_results.paged_results_test:paged_results_test.py:565 2 search INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 5; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7fd1647dc220>]. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 1 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 2 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 3 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 4 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 5 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 6 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 7 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 8 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 9 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 10 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 11 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 12 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 13 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 14 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 15 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 16 INFO  
tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 17 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 18 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 19 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:568 100 results INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:565 3 search INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 5; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7fd1647dc220>]. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 1 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 2 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 3 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 4 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 5 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 6 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 7 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 8 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 9 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 10 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 11 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 12 INFO  
tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 13 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 14 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 15 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 16 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 17 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 18 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 19 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:568 100 results INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:570 If we are here, then no error has happened. We are good. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:573 Restore ACI INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 100 users
Passed suites/paged_results/paged_results_test.py::test_search_dns_ip_aci[ip] 4.15
-------------------------------Captured log call--------------------------------
INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 100 users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:547 Back up current suffix ACI INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:550 Add test ACI INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:556 Set user bind INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:559 Create simple paged results control instance INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:563 Initiate three searches with a paged results control INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:565 1 search INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 5; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7fd174e02250>]. 
INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 1 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 2 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 3 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 4 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 5 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 6 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 7 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 8 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 9 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 10 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 11 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 12 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 13 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 14 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 15 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 16 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 17 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 18 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 19 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:568 100 results INFO  
tests.suites.paged_results.paged_results_test:paged_results_test.py:565 2 search INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 5; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7fd174e02250>]. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 1 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 2 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 3 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 4 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 5 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 6 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 7 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 8 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 9 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 10 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 11 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 12 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 13 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 14 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 15 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 16 INFO  
tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 17 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 18 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 19 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:568 100 results INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:565 3 search INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 5; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7fd174e02250>]. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 1 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 2 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 3 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 4 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 5 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 6 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 7 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 8 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 9 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 10 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 11 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 12 INFO  
tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 13 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 14 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 15 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 16 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 17 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 18 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 19 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:568 100 results INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:570 If we are here, then no error has happened. We are good. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:573 Restore ACI INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 100 users
Passed suites/paged_results/paged_results_test.py::test_search_multiple_paging 4.51
-------------------------------Captured log call--------------------------------
INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 100 users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:606 Set user bind INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:609 Create simple paged results control instance INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:614 Iteration 0 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:614 Iteration 1 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:614 Iteration 2 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 100 users
Passed suites/paged_results/paged_results_test.py::test_search_invalid_cookie[1000] 5.59
-------------------------------Captured log call--------------------------------
INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 100 users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:661 Set user bind INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:664 Create simple paged results control instance INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:672 Put an invalid cookie (1000) to the control. TypeError is expected INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 100 users
Passed suites/paged_results/paged_results_test.py::test_search_invalid_cookie[-1] 4.62
-------------------------------Captured log call--------------------------------
INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 100 users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:661 Set user bind INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:664 Create simple paged results control instance INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:672 Put an invalid cookie (-1) to the control. TypeError is expected INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 100 users
Passed suites/paged_results/paged_results_test.py::test_search_abandon_with_zero_size 0.46
-------------------------------Captured log call--------------------------------
INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 10 users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:705 Set user bind INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:708 Create simple paged results control instance INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 10 users
Passed suites/paged_results/paged_results_test.py::test_search_pagedsizelimit_success 0.48
-------------------------------Captured log call--------------------------------
INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-pagedsizelimit to 20. Previous value - b'0'. Modified suffix - cn=config. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 10 users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:754 Set user bind INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 10; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7fd1648243d0>]. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:762 10 results INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 10 users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-pagedsizelimit to b'0'. Previous value - b'20'. Modified suffix - cn=config.
Passed suites/paged_results/paged_results_test.py::test_search_nspagedsizelimit[5-15-PASS] 0.50
-------------------------------Captured log call--------------------------------
INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 10 users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-pagedsizelimit to 5. Previous value - b'0'. Modified suffix - cn=config. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsPagedSizeLimit to 15. Previous value - None. Modified suffix - uid=simplepaged_test,ou=People,dc=example,dc=com. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:817 Set user bind INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:828 Expect to pass INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 10; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7fd16481e8e0>]. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:830 10 results INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 10 users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-pagedsizelimit to b'0'. Previous value - b'5'. Modified suffix - cn=config. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsPagedSizeLimit to None. Previous value - b'15'. Modified suffix - uid=simplepaged_test,ou=People,dc=example,dc=com.
Passed suites/paged_results/paged_results_test.py::test_search_nspagedsizelimit[15-5-SIZELIMIT_EXCEEDED] 0.56
-------------------------------Captured log call--------------------------------
INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 10 users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-pagedsizelimit to 15. Previous value - b'0'. Modified suffix - cn=config. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsPagedSizeLimit to 5. Previous value - None. Modified suffix - uid=simplepaged_test,ou=People,dc=example,dc=com. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:817 Set user bind INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:824 Expect to fail with SIZELIMIT_EXCEEDED INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 10; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7fd16482d280>]. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 10 users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-pagedsizelimit to b'0'. Previous value - b'15'. Modified suffix - cn=config. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsPagedSizeLimit to None. Previous value - b'5'. Modified suffix - uid=simplepaged_test,ou=People,dc=example,dc=com.
Passed suites/paged_results/paged_results_test.py::test_search_paged_limits[conf_attr_values0-ADMINLIMIT_EXCEEDED] 4.48
-------------------------------Captured log call--------------------------------
INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 101 users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-sizelimit to 5000. Previous value - b'2000'. Modified suffix - cn=config. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-pagedsizelimit to 5000. Previous value - b'0'. Modified suffix - cn=config. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-idlistscanlimit to 100. Previous value - b'4000'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-lookthroughlimit to 100. Previous value - b'5000'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:889 Set user bind INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:896 Expect to fail with ADMINLIMIT_EXCEEDED INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 10; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7fd16481abb0>]. 
INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 1 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 2 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 3 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 4 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 5 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 6 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 7 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 101 users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-sizelimit to b'2000'. Previous value - b'5000'. Modified suffix - cn=config. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-pagedsizelimit to b'0'. Previous value - b'5000'. Modified suffix - cn=config. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-lookthroughlimit to b'5000'. Previous value - b'100'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-idlistscanlimit to b'4000'. Previous value - b'100'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config.
Passed suites/paged_results/paged_results_test.py::test_search_paged_user_limits[conf_attr_values0-ADMINLIMIT_EXCEEDED] 4.42
-------------------------------Captured log call--------------------------------
INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 101 users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-lookthroughlimit to 1000. Previous value - b'5000'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-idlistscanlimit to 1000. Previous value - b'4000'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsPagedIDListScanLimit to 100. Previous value - None. Modified suffix - uid=simplepaged_test,ou=People,dc=example,dc=com. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsPagedLookthroughLimit to 100. Previous value - None. Modified suffix - uid=simplepaged_test,ou=People,dc=example,dc=com. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:963 Set user bind INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:970 Expect to fail with ADMINLIMIT_EXCEEDED INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 10; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7fd17445a9a0>]. 
INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 1 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 2 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 3 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 4 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 5 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 6 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 7 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 101 users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-lookthroughlimit to b'5000'. Previous value - b'1000'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-idlistscanlimit to b'4000'. Previous value - b'1000'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsPagedIDListScanLimit to None. Previous value - b'100'. Modified suffix - uid=simplepaged_test,ou=People,dc=example,dc=com. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsPagedLookthroughLimit to None. Previous value - b'100'. Modified suffix - uid=simplepaged_test,ou=People,dc=example,dc=com.
Passed suites/paged_results/paged_results_test.py::test_ger_basic 0.87
-------------------------------Captured log call--------------------------------
INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 20 users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 4; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7fd1761bdca0>, <ldap.controls.simple.GetEffectiveRightsControl object at 0x7fd1761bd910>]. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 1 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 2 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 3 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 4 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 5 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:1014 20 results INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:1016 Check for attributeLevelRights INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:1019 Remove added users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 20 users
Passed suites/paged_results/paged_results_test.py::test_multi_suffix_search 7.70
-------------------------------Captured log setup-------------------------------
INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:87 Adding suffix:o=test_parent and backend: parent_base INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:96 Adding ACI to allow our test user to search INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:108 Adding suffix:ou=child,o=test_parent and backend: child_base
-------------------------------Captured log call--------------------------------
INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:1050 Clear the access log INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 10 users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 10 users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: o=test_parent; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 4; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7fd17ada47f0>]. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 1 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 2 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 3 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 4 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 5 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:1061 20 results INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:1064 Restart the server to flush the logs INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:1070 Assert that last pr_cookie == -1 and others pr_cookie == 0 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:1075 Remove added users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 10 users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 10 users
Passed suites/paged_results/paged_results_test.py::test_maxsimplepaged_per_conn_success[None] 0.86
-------------------------------Captured log call--------------------------------
INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 20 users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:1108 Set user bind INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 4; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7fd1660fae20>]. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 1 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 2 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 3 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 4 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:1115 20 results INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:1118 Remove added users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 20 users
Passed suites/paged_results/paged_results_test.py::test_maxsimplepaged_per_conn_success[-1] 1.10
-------------------------------Captured log call--------------------------------
INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 20 users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-maxsimplepaged-per-conn to -1. Previous value - b'-1'. Modified suffix - cn=config. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:1108 Set user bind INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 4; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7fd176423c10>]. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 1 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 2 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 3 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 4 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:1115 20 results INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:1118 Remove added users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 20 users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-maxsimplepaged-per-conn to b'-1'. Previous value - b'-1'. Modified suffix - cn=config.
Passed suites/paged_results/paged_results_test.py::test_maxsimplepaged_per_conn_success[1000] 1.12
-------------------------------Captured log call--------------------------------
INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 20 users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-maxsimplepaged-per-conn to 1000. Previous value - b'-1'. Modified suffix - cn=config. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:1108 Set user bind INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 4; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7fd1663613d0>]. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 1 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 2 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 3 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 4 INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:1115 20 results INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:1118 Remove added users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 20 users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-maxsimplepaged-per-conn to b'-1'. Previous value - b'1000'. Modified suffix - cn=config.
Passed suites/paged_results/paged_results_test.py::test_maxsimplepaged_per_conn_failure[0] 0.94
-------------------------------Captured log call--------------------------------
INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 20 users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-maxsimplepaged-per-conn to 0. Previous value - b'-1'. Modified suffix - cn=config. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:1153 Set user bind INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:1156 Create simple paged results control instance INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:1171 Remove added users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 20 users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-maxsimplepaged-per-conn to b'-1'. Previous value - b'0'. Modified suffix - cn=config.
Passed suites/paged_results/paged_results_test.py::test_maxsimplepaged_per_conn_failure[1] 0.89
-------------------------------Captured log call--------------------------------
INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 20 users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-maxsimplepaged-per-conn to 1. Previous value - b'-1'. Modified suffix - cn=config. INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:1153 Set user bind INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:1156 Create simple paged results control instance INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:1171 Remove added users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 20 users INFO  tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-maxsimplepaged-per-conn to b'-1'. Previous value - b'1'. Modified suffix - cn=config.
Passed suites/password/password_policy_test.py::test_password_change_section 1.01
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/password/password_policy_test.py::test_password_syntax_section 2.68
No log output captured.
Passed suites/password/password_policy_test.py::test_password_history_section 8.78
No log output captured.
Passed suites/password/password_policy_test.py::test_password_minimum_age_section 11.47
No log output captured.
Passed suites/password/password_policy_test.py::test_account_lockout_and_lockout_duration_section 6.30
No log output captured.
Passed suites/password/password_policy_test.py::test_grace_limit_section 17.22
No log output captured.
Passed suites/password/password_policy_test.py::test_additional_corner_cases 0.87
No log output captured.
Passed suites/password/password_test.py::test_password_delete_specific_password 0.07
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.password_test:password_test.py:44 Running test_password_delete_specific_password... INFO  tests.suites.password.password_test:password_test.py:65 test_password_delete_specific_password: PASSED
Passed suites/password/pbkdf2_upgrade_plugin_test.py::test_pbkdf2_upgrade 9.16
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/password/pwdAdmin_test.py::test_pwdAdmin_bypass 0.14
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. INFO  tests.suites.password.pwdAdmin_test:pwdAdmin_test.py:46 test_pwdAdmin_init: Creating Password Administrator entries... INFO  tests.suites.password.pwdAdmin_test:pwdAdmin_test.py:85 test_pwdAdmin_init: Configuring password policy... INFO  tests.suites.password.pwdAdmin_test:pwdAdmin_test.py:100 Add aci to allow password admin to add/update entries... INFO  tests.suites.password.pwdAdmin_test:pwdAdmin_test.py:115 test_pwdAdmin_init: Bind as the Password Administrator (before activating)... INFO  tests.suites.password.pwdAdmin_test:pwdAdmin_test.py:128 test_pwdAdmin_init: Attempt to add entries with invalid passwords, these adds should fail...
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.pwdAdmin_test:pwdAdmin_test.py:167 test_pwdAdmin: Activate the Password Administrator...
Passed suites/password/pwdAdmin_test.py::test_pwdAdmin_no_admin 0.08
No log output captured.
Passed suites/password/pwdAdmin_test.py::test_pwdAdmin_modify 0.14
No log output captured.
Passed suites/password/pwdAdmin_test.py::test_pwdAdmin_group 0.14
No log output captured.
Passed suites/password/pwdAdmin_test.py::test_pwdAdmin_config_validation 0.02
No log output captured.
Passed suites/password/pwdModify_test.py::test_pwd_modify_with_different_operation 12.03
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.pwdModify_test:pwdModify_test.py:101 Attempt for Password change for an entry that does not exists INFO  tests.suites.password.pwdModify_test:pwdModify_test.py:103 Attempt for Password change for an entry that exists INFO  tests.suites.password.pwdModify_test:pwdModify_test.py:105 Attempt for Password change to old for an entry that exists INFO  tests.suites.password.pwdModify_test:pwdModify_test.py:107 Attempt for Password Change with Binddn as testuser but with wrong old password INFO  tests.suites.password.pwdModify_test:pwdModify_test.py:111 Attempt for Password Change with Binddn as testuser INFO  tests.suites.password.pwdModify_test:pwdModify_test.py:113 Attempt for Password Change without giving newpassword INFO  tests.suites.password.pwdModify_test:pwdModify_test.py:116 Change password to NEW_PASSWD i.e newpassword INFO  tests.suites.password.pwdModify_test:pwdModify_test.py:119 Check binding with old/new password INFO  tests.suites.password.pwdModify_test:pwdModify_test.py:124 Change password back to OLD_PASSWD i.e password INFO  tests.suites.password.pwdModify_test:pwdModify_test.py:127 Checking password change Operation using a Non-Secure connection INFO  tests.suites.password.pwdModify_test:pwdModify_test.py:131 Testuser attempts to change password for testuser2(userPassword attribute is Set) INFO  tests.suites.password.pwdModify_test:pwdModify_test.py:147 Directory Manager attempts to change password for testuser2(userPassword attribute is Set) INFO  tests.suites.password.pwdModify_test:pwdModify_test.py:150 Changing userPassword attribute to Undefined for testuser2 INFO  tests.suites.password.pwdModify_test:pwdModify_test.py:152 Testuser attempts to change password for testuser2(userPassword attribute is Undefined) INFO  tests.suites.password.pwdModify_test:pwdModify_test.py:156 Directory Manager attempts to change password for testuser2(userPassword attribute is Undefined) INFO  
tests.suites.password.pwdModify_test:pwdModify_test.py:159 Create a password syntax policy. Attempt to change to password that violates that policy INFO  tests.suites.password.pwdModify_test:pwdModify_test.py:163 Reset password syntax policy INFO  tests.suites.password.pwdModify_test:pwdModify_test.py:165 userPassword mod with control results in ber decode error INFO  tests.suites.password.pwdModify_test:pwdModify_test.py:169 Resetting the testuser's password
Passed suites/password/pwdModify_test.py::test_pwd_modify_with_password_policy 0.12
-------------------------------Captured log setup-------------------------------
INFO  tests.suites.password.pwdModify_test:pwdModify_test.py:44 Change the pwd storage type to clear and change the password once to refresh it (for the rest of tests)
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.pwdModify_test:pwdModify_test.py:192 Change the password and check that a new entry has been added to the history INFO  tests.suites.password.pwdModify_test:pwdModify_test.py:196 Try changing password to one stored in history. Should fail INFO  tests.suites.password.pwdModify_test:pwdModify_test.py:199 Change the password several times in a row, and try binding after each change
Passed suites/password/pwdModify_test.py::test_pwd_modify_with_subsuffix 0.33
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.pwdModify_test:pwdModify_test.py:228 Add a new SubSuffix INFO  tests.suites.password.pwdModify_test:pwdModify_test.py:235 Add the container & create password policies INFO  tests.suites.password.pwdModify_test:pwdModify_test.py:243 Add two New users under the SubEntry INFO  tests.suites.password.pwdModify_test:pwdModify_test.py:265 Changing password of uid=test_user0,ou=TestPeople_bug834047,dc=example,dc=com to newpassword INFO  tests.suites.password.pwdModify_test:pwdModify_test.py:270 Try to delete password- case when password is specified INFO  tests.suites.password.pwdModify_test:pwdModify_test.py:274 Try to delete password- case when password is not specified
Passed suites/password/pwdPolicy_attribute_test.py::test_pwd_reset 1.65
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. INFO  tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:39 Adding test user {}
Passed suites/password/pwdPolicy_attribute_test.py::test_change_pwd[on-off-UNWILLING_TO_PERFORM] 2.35
-------------------------------Captured log setup-------------------------------
INFO  tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:66 Create password policy for subtree ou=people,dc=example,dc=com INFO  tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:69 Create password policy for user uid=simplepaged_test,ou=people,dc=example,dc=com
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:154 Set passwordChange to "on" - ou=people,dc=example,dc=com INFO  tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:161 Set passwordChange to "off" - uid=simplepaged_test,ou=people,dc=example,dc=com INFO  tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:170 Bind as user and modify userPassword INFO  tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:182 Bind as DM
Passed suites/password/pwdPolicy_attribute_test.py::test_change_pwd[off-off-UNWILLING_TO_PERFORM] 2.09
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:154 Set passwordChange to "off" - ou=people,dc=example,dc=com INFO  tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:161 Set passwordChange to "off" - uid=simplepaged_test,ou=people,dc=example,dc=com INFO  tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:170 Bind as user and modify userPassword INFO  tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:182 Bind as DM
Passed suites/password/pwdPolicy_attribute_test.py::test_change_pwd[off-on-False] 2.15
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:154 Set passwordChange to "off" - ou=people,dc=example,dc=com INFO  tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:161 Set passwordChange to "on" - uid=simplepaged_test,ou=people,dc=example,dc=com INFO  tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:170 Bind as user and modify userPassword INFO  tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:182 Bind as DM
Passed suites/password/pwdPolicy_attribute_test.py::test_change_pwd[on-on-False] 2.12
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:154 Set passwordChange to "on" - ou=people,dc=example,dc=com INFO  tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:161 Set passwordChange to "on" - uid=simplepaged_test,ou=people,dc=example,dc=com INFO  tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:170 Bind as user and modify userPassword INFO  tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:182 Bind as DM
Passed suites/password/pwdPolicy_attribute_test.py::test_pwd_min_age 14.18
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:219 Set passwordminage to "10" - ou=people,dc=example,dc=com INFO  tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:224 Set passwordminage to "10" - uid=simplepaged_test,ou=people,dc=example,dc=com INFO  tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:228 Set passwordminage to "10" - cn=config INFO  tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:233 Bind as user and modify userPassword INFO  tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:239 Bind as user and modify userPassword straight away after previous change INFO  tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:244 Wait 12 second INFO  tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:248 Bind as user and modify userPassword INFO  tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:256 Bind as DM
Passed suites/password/pwdPolicy_controls_test.py::test_pwd_must_change 2.12
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.pwdPolicy_controls_test:pwdPolicy_controls_test.py:109 Configure password policy with passwordMustChange set to "on" INFO  tests.suites.password.pwdPolicy_controls_test:pwdPolicy_controls_test.py:120 Reset userpassword as Directory Manager INFO  tests.suites.password.pwdPolicy_controls_test:pwdPolicy_controls_test.py:125 Bind should return ctrl with error code 2 (changeAfterReset)
Passed suites/password/pwdPolicy_controls_test.py::test_pwd_expired_grace_limit 6.29
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.pwdPolicy_controls_test:pwdPolicy_controls_test.py:159 Configure password policy with grace limit set to 2 INFO  tests.suites.password.pwdPolicy_controls_test:pwdPolicy_controls_test.py:164 Change password and wait for it to expire INFO  tests.suites.password.pwdPolicy_controls_test:pwdPolicy_controls_test.py:168 Bind and use up one grace login (only one left) INFO  tests.suites.password.pwdPolicy_controls_test:pwdPolicy_controls_test.py:178 Use up last grace login, should get control INFO  tests.suites.password.pwdPolicy_controls_test:pwdPolicy_controls_test.py:184 No grace login available, bind should fail, and no control should be returned
Passed suites/password/pwdPolicy_controls_test.py::test_pwd_expiring_with_warning 5.20
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.pwdPolicy_controls_test:pwdPolicy_controls_test.py:206 Configure password policy INFO  tests.suites.password.pwdPolicy_controls_test:pwdPolicy_controls_test.py:211 Change password and get controls INFO  tests.suites.password.pwdPolicy_controls_test:pwdPolicy_controls_test.py:222 Warning has been sent, try the bind again, and recheck the expiring time
Passed suites/password/pwdPolicy_controls_test.py::test_pwd_expiring_with_no_warning 6.26
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.pwdPolicy_controls_test:pwdPolicy_controls_test.py:250 Configure password policy INFO  tests.suites.password.pwdPolicy_controls_test:pwdPolicy_controls_test.py:255 When the warning is less than the max age, we never send expiring control response INFO  tests.suites.password.pwdPolicy_controls_test:pwdPolicy_controls_test.py:262 Turn on sending expiring control regardless of warning INFO  tests.suites.password.pwdPolicy_controls_test:pwdPolicy_controls_test.py:274 Check expiring time again INFO  tests.suites.password.pwdPolicy_controls_test:pwdPolicy_controls_test.py:285 Turn off sending expiring control (restore the default setting)
Passed suites/password/pwdPolicy_inherit_global_test.py::test_entry_has_no_restrictions[off-off] 1.08
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. INFO  tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:39 Adding user uid=buser,ou=People,dc=example,dc=com INFO  tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:46 Adding an aci for the bind user INFO  tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:71 Enable fine-grained policy INFO  tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:92 Default value of nsslapd-pwpolicy-inherit-global is off INFO  tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:92 Default value of passwordCheckSyntax is off
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:119 Set nsslapd-pwpolicy-inherit-global to off INFO  tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:120 Set passwordCheckSyntax to off INFO  tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:92 Default value of nsslapd-pwpolicy-inherit-global is off INFO  tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:92 Default value of passwordCheckSyntax is off INFO  tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:129 Bind as test user INFO  tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:132 Make sure an entry added to ou=people has no password syntax restrictions.
Passed suites/password/pwdPolicy_inherit_global_test.py::test_entry_has_no_restrictions[on-off] 1.04
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:119 Set nsslapd-pwpolicy-inherit-global to on INFO  tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:120 Set passwordCheckSyntax to off INFO  tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:92 Default value of nsslapd-pwpolicy-inherit-global is on INFO  tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:92 Default value of passwordCheckSyntax is off INFO  tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:129 Bind as test user INFO  tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:132 Make sure an entry added to ou=people has no password syntax restrictions.
Passed suites/password/pwdPolicy_inherit_global_test.py::test_entry_has_no_restrictions[off-on] 1.06
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:119 Set nsslapd-pwpolicy-inherit-global to off INFO  tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:120 Set passwordCheckSyntax to on INFO  tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:92 Default value of nsslapd-pwpolicy-inherit-global is off INFO  tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:92 Default value of passwordCheckSyntax is on INFO  tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:129 Bind as test user INFO  tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:132 Make sure an entry added to ou=people has no password syntax restrictions.
Passed suites/password/pwdPolicy_inherit_global_test.py::test_entry_has_restrictions 1.32
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:173 Set nsslapd-pwpolicy-inherit-global to on INFO  tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:174 Set passwordCheckSyntax to on INFO  tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:92 Default value of nsslapd-pwpolicy-inherit-global is on INFO  tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:92 Default value of passwordCheckSyntax is on INFO  tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:187 Bind as test user INFO  tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:192 Try to add user with a short password (<9) INFO  tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:197 Try to add user with a long password (>9) INFO  tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:201 Bind as DM user
Passed suites/password/pwdPolicy_syntax_test.py::test_basic 5.86
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. INFO  tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:34 Enable global password policy. Check for syntax.
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordMinLength: length too short INFO  tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordMinDigits: does not contain minimum number of digits INFO  tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordMinAlphas: does not contain minimum number of alphas INFO  tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordMaxRepeats: too many repeating characters INFO  tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordMinSpecials: does not contain minimum number of special characters INFO  tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordMinLowers: does not contain minimum number of lowercase characters INFO  tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordMinUppers: does not contain minimum number of lowercase characters INFO  tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordDictCheck: Password found in dictionary INFO  tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordPalindrome: Password is palindrome INFO  tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordMaxSequence: Max monotonic sequence is not allowed INFO  tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordMaxSequence: Max monotonic sequence is not allowed INFO  
tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordMaxSequence: Max monotonic sequence is not allowed INFO  tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordMaxSequence: Max monotonic sequence is not allowed INFO  tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordMaxSeqSets: Max monotonic sequence is not allowed INFO  tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordMaxClassChars: Too may consecutive characters from the same class INFO  tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordMaxClassChars: Too may consecutive characters from the same class INFO  tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordMaxClassChars: Too may consecutive characters from the same class INFO  tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordMaxClassChars: Too may consecutive characters from the same class INFO  tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordBadWords: Too may consecutive characters from the same class INFO  tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordUserAttributes: Password found in user entry
Passed suites/password/pwdPolicy_syntax_test.py::test_config_set_few_user_attributes 6.39
-------------------------------Captured log call--------------------------------
INFO  lib389:pwdPolicy_syntax_test.py:315 Set passwordUserAttributes to "description loginShell" INFO  lib389:pwdPolicy_syntax_test.py:319 Verify passwordUserAttributes has the values INFO  lib389:pwdPolicy_syntax_test.py:323 Reset passwordUserAttributes INFO  lib389:pwdPolicy_syntax_test.py:326 Verify passwordUserAttributes enforced the policy INFO  tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordUserAttributes: Password found in user entry INFO  tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordUserAttributes: Password found in user entry INFO  tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordUserAttributes: Password found in user entry INFO  tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordUserAttributes: Password found in user entry INFO  tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordUserAttributes: Password found in user entry INFO  tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordUserAttributes: Password found in user entry INFO  tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordUserAttributes: Password found in user entry INFO  tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordUserAttributes: Password found in user entry INFO  tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordUserAttributes: Password found in user entry
Passed suites/password/pwdPolicy_syntax_test.py::test_config_set_few_bad_words 6.96
-------------------------------Captured log call--------------------------------
INFO  lib389:pwdPolicy_syntax_test.py:355 Set passwordBadWords to "fedora redhat" INFO  lib389:pwdPolicy_syntax_test.py:360 Verify passwordBadWords has the values INFO  lib389:pwdPolicy_syntax_test.py:364 Reset passwordBadWords INFO  lib389:pwdPolicy_syntax_test.py:367 Verify passwordBadWords enforced the policy INFO  tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordBadWords: Too may consecutive characters from the same class INFO  tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordBadWords: Too may consecutive characters from the same class INFO  tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordBadWords: Too may consecutive characters from the same class INFO  tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordBadWords: Too may consecutive characters from the same class INFO  tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordBadWords: Too may consecutive characters from the same class INFO  tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordBadWords: Too may consecutive characters from the same class INFO  tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordBadWords: Too may consecutive characters from the same class INFO  tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordBadWords: Too may consecutive characters from the same class INFO  tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordBadWords: Too may consecutive characters from the same class
Passed suites/password/pwdPolicy_token_test.py::test_token_lengths 3.76
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.pwdPolicy_token_test:pwdPolicy_token_test.py:67 Testing password len 4 token (test) INFO  tests.suites.password.pwdPolicy_token_test:pwdPolicy_token_test.py:72 Password correctly rejected: {'msgtype': 103, 'msgid': 12, 'result': 19, 'desc': 'Constraint violation', 'ctrls': [], 'info': 'invalid password syntax - password based off of user entry'} INFO  tests.suites.password.pwdPolicy_token_test:pwdPolicy_token_test.py:67 Testing password len 6 token (test_u) INFO  tests.suites.password.pwdPolicy_token_test:pwdPolicy_token_test.py:72 Password correctly rejected: {'msgtype': 103, 'msgid': 16, 'result': 19, 'desc': 'Constraint violation', 'ctrls': [], 'info': 'invalid password syntax - password based off of user entry'} INFO  tests.suites.password.pwdPolicy_token_test:pwdPolicy_token_test.py:67 Testing password len 10 token (test_user1) INFO  tests.suites.password.pwdPolicy_token_test:pwdPolicy_token_test.py:72 Password correctly rejected: {'msgtype': 103, 'msgid': 20, 'result': 19, 'desc': 'Constraint violation', 'ctrls': [], 'info': 'invalid password syntax - password based off of user entry'}
Passed suites/password/pwdPolicy_warning_test.py::test_different_values[ ] 0.01
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:205 Get the default value INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:209 An invalid value is being tested INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:171 Setting passwordSendExpiringTime to INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:213 Now check the value is unchanged INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:216 Invalid value was rejected correctly
Passed suites/password/pwdPolicy_warning_test.py::test_different_values[junk123] 0.26
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:205 Get the default value INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:209 An invalid value is being tested INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:171 Setting passwordSendExpiringTime to junk123 INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:213 Now check the value is unchanged INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:216 Invalid value junk123 was rejected correctly
Passed suites/password/pwdPolicy_warning_test.py::test_different_values[on] 1.27
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:205 Get the default value INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:218 A valid value is being tested INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:171 Setting passwordSendExpiringTime to on INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:221 Now check that the value has been changed INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:224 passwordSendExpiringTime is now set to on INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:226 Set passwordSendExpiringTime back to the default value INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:171 Setting passwordSendExpiringTime to off
Passed suites/password/pwdPolicy_warning_test.py::test_different_values[off] 1.27
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:205 Get the default value INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:218 A valid value is being tested INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:171 Setting passwordSendExpiringTime to off INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:221 Now check that the value has been changed INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:224 passwordSendExpiringTime is now set to off INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:226 Set passwordSendExpiringTime back to the default value INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:171 Setting passwordSendExpiringTime to off
Passed suites/password/pwdPolicy_warning_test.py::test_expiry_time 0.05
-------------------------------Captured log setup-------------------------------
INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:46 Get the default values INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:53 Set the new values INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:113 Add the user
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:257 Get the password expiry warning time INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:258 Binding with (uid=tuser,ou=people,dc=example,dc=com) and requesting the password expiry warning time INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:158 Bind with the user and request the password expiry warning time INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:262 Check whether the time is returned INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:265 user's password will expire in 172800 seconds INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:268 Rebinding as DM
Passed suites/password/pwdPolicy_warning_test.py::test_password_warning[passwordSendExpiringTime-off] 0.54
-------------------------------Captured log setup-------------------------------
INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:46 Get the default values INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:53 Set the new values INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:113 Add the user
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:300 Set configuration parameter INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:171 Setting passwordSendExpiringTime to off INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:303 Binding with (uid=tuser,ou=people,dc=example,dc=com) and requesting password expiry warning time INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:158 Bind with the user and request the password expiry warning time INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:307 Check the state of the control INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:309 Password Expiry warning time is not returned as passwordSendExpiringTime is set to off INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:315 Rebinding as DM
Passed suites/password/pwdPolicy_warning_test.py::test_password_warning[passwordWarning-3600] 0.54
-------------------------------Captured log setup-------------------------------
INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:46 Get the default values INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:53 Set the new values INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:113 Add the user
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:300 Set configuration parameter INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:171 Setting passwordWarning to 3600 INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:303 Binding with (uid=tuser,ou=people,dc=example,dc=com) and requesting password expiry warning time INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:158 Bind with the user and request the password expiry warning time INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:307 Check the state of the control INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:312 (uid=tuser,ou=people,dc=example,dc=com) password will expire in 172799 seconds INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:315 Rebinding as DM
Passed suites/password/pwdPolicy_warning_test.py::test_with_different_password_states 0.09
-------------------------------Captured log setup-------------------------------
INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:46 Get the default values INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:53 Set the new values INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:113 Add the user
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:348 Expire user's password by changing passwordExpirationTime timestamp INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:352 Old passwordExpirationTime: 20201031015851Z INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:355 New passwordExpirationTime: 20200930015851Z INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:358 Attempting to bind with user uid=tuser,ou=people,dc=example,dc=com and retrieve the password expiry warning time INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:158 Bind with the user and request the password expiry warning time INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:362 Bind Failed, error: <ExceptionInfo INVALID_CREDENTIALS({'msgtype': 97, 'msgid': 73, 'result': 49, 'desc': 'Invalid credentials', 'ctrls': [('1.3.6.1.4.1.....8.5.1', 0, b'0\x84\x00\x00\x00\x03\x81\x01\x00'), ('2.16.840.1.113730.3.4.4', 0, b'0')], 'info': 'password expired!'}) tblen=10> INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:364 Rebinding as DM INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:367 Reverting back user's passwordExpirationTime INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:371 Rebinding with uid=tuser,ou=people,dc=example,dc=com and retrieving the password expiry warning time INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:158 Bind with the user and request the password expiry warning time INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:374 Check that the control is returned INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:377 user's password will expire in 172800 seconds INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:380 Rebinding as DM
Passed suites/password/pwdPolicy_warning_test.py::test_default_behavior 0.03
-------------------------------Captured log setup-------------------------------
INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:84 Get the default values INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:90 Set the new values INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:113 Add the user
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:406 Binding with uid=tuser,ou=people,dc=example,dc=com and requesting the password expiry warning time INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:158 Bind with the user and request the password expiry warning time INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:410 Check that no control is returned INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:413 Rebinding as DM
Passed suites/password/pwdPolicy_warning_test.py::test_when_maxage_and_warning_are_the_same 2.08
-------------------------------Captured log setup-------------------------------
INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:84 Get the default values INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:90 Set the new values INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:113 Add the user
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:442 Set the new values INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:446 First change user's password to reset its password expiration time INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:453 Binding with uid=tuser,ou=people,dc=example,dc=com and requesting the password expiry warning time INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:158 Bind with the user and request the password expiry warning time INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:457 Check that control is returned even if passwordSendExpiringTime is set to off INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:461 user's password will expire in 86400 seconds INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:463 Rebinding as DM
Passed suites/password/pwdPolicy_warning_test.py::test_with_local_policy 0.28
-----------------------------Captured stdout setup------------------------------
Successfully created user password policy
-------------------------------Captured log setup-------------------------------
INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:46 Get the default values INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:53 Set the new values INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:113 Add the user INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:141 Setting fine grained policy for user (uid=tuser,ou=people,dc=example,dc=com)
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:490 Attempting to get password expiry warning time for user uid=tuser,ou=people,dc=example,dc=com INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:158 Bind with the user and request the password expiry warning time INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:493 Check that the control is not returned INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:496 Password expiry warning time is not returned INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:498 Rebinding as DM
Passed suites/password/pwdPolicy_warning_test.py::test_search_shadowWarning_when_passwordWarning_is_lower 0.12
-------------------------------Captured log setup-------------------------------
INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:46 Get the default values INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:53 Set the new values
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:530 Bind as cn=Directory Manager INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:533 Creating test user INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:538 Setting passwordWarning to smaller value than 86400 INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:541 Bind as test user INFO  tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:544 Check if attribute shadowWarning is present
Passed suites/password/pwdPolicy_warning_test.py::test_password_expire_works 1.62
No log output captured.
Passed suites/password/pwd_algo_test.py::test_pwd_algo_test[CLEAR] 0.32
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.pwd_algo_test:pwd_algo_test.py:138 Test CLEAR PASSED
Passed suites/password/pwd_algo_test.py::test_pwd_algo_test[CRYPT] 0.05
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.pwd_algo_test:pwd_algo_test.py:138 Test CRYPT PASSED
Passed suites/password/pwd_algo_test.py::test_pwd_algo_test[CRYPT-MD5] 0.05
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.pwd_algo_test:pwd_algo_test.py:138 Test CRYPT-MD5 PASSED
Passed suites/password/pwd_algo_test.py::test_pwd_algo_test[CRYPT-SHA256] 0.08
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.pwd_algo_test:pwd_algo_test.py:138 Test CRYPT-SHA256 PASSED
Passed suites/password/pwd_algo_test.py::test_pwd_algo_test[CRYPT-SHA512] 0.08
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.pwd_algo_test:pwd_algo_test.py:138 Test CRYPT-SHA512 PASSED
Passed suites/password/pwd_algo_test.py::test_pwd_algo_test[MD5] 0.17
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.pwd_algo_test:pwd_algo_test.py:138 Test MD5 PASSED
Passed suites/password/pwd_algo_test.py::test_pwd_algo_test[SHA] 0.09
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.pwd_algo_test:pwd_algo_test.py:138 Test SHA PASSED
Passed suites/password/pwd_algo_test.py::test_pwd_algo_test[SHA256] 0.07
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.pwd_algo_test:pwd_algo_test.py:138 Test SHA256 PASSED
Passed suites/password/pwd_algo_test.py::test_pwd_algo_test[SHA384] 0.06
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.pwd_algo_test:pwd_algo_test.py:138 Test SHA384 PASSED
Passed suites/password/pwd_algo_test.py::test_pwd_algo_test[SHA512] 0.06
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.pwd_algo_test:pwd_algo_test.py:138 Test SHA512 PASSED
Passed suites/password/pwd_algo_test.py::test_pwd_algo_test[SMD5] 0.07
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.pwd_algo_test:pwd_algo_test.py:138 Test SMD5 PASSED
Passed suites/password/pwd_algo_test.py::test_pwd_algo_test[SSHA] 0.20
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.pwd_algo_test:pwd_algo_test.py:138 Test SSHA PASSED
Passed suites/password/pwd_algo_test.py::test_pwd_algo_test[SSHA256] 0.15
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.pwd_algo_test:pwd_algo_test.py:138 Test SSHA256 PASSED
Passed suites/password/pwd_algo_test.py::test_pwd_algo_test[SSHA384] 0.06
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.pwd_algo_test:pwd_algo_test.py:138 Test SSHA384 PASSED
Passed suites/password/pwd_algo_test.py::test_pwd_algo_test[SSHA512] 0.06
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.pwd_algo_test:pwd_algo_test.py:138 Test SSHA512 PASSED
Passed suites/password/pwd_algo_test.py::test_pwd_algo_test[PBKDF2_SHA256] 0.48
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.pwd_algo_test:pwd_algo_test.py:138 Test PBKDF2_SHA256 PASSED
Passed suites/password/pwd_algo_test.py::test_pwd_algo_test[DEFAULT] 0.24
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.pwd_algo_test:pwd_algo_test.py:138 Test DEFAULT PASSED
Passed suites/password/pwd_algo_test.py::test_pbkdf2_algo 2.56
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.pwd_algo_test:pwd_algo_test.py:170 Test PASSED
Passed suites/password/pwd_lockout_bypass_test.py::test_lockout_bypass 0.41
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/password/pwd_log_test.py::test_hide_unhashed_pwd 4.39
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.pwd_log_test:pwd_log_test.py:79 Test complete
Passed suites/password/pwp_gracel_test.py::test_password_gracelimit_section 13.83
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/password/pwp_history_test.py::test_history_is_not_overwritten 4.40
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.pwp_history_test:pwp_history_test.py:77 Configured password policy.
Passed suites/password/pwp_history_test.py::test_basic 7.33
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.pwp_history_test:pwp_history_test.py:152 Configured password policy. INFO  tests.suites.password.pwp_history_test:pwp_history_test.py:170 Password change correctly rejected INFO  tests.suites.password.pwp_history_test:pwp_history_test.py:201 Correct number of passwords found in history. INFO  tests.suites.password.pwp_history_test:pwp_history_test.py:212 Password change correctly rejected INFO  tests.suites.password.pwp_history_test:pwp_history_test.py:222 Password change correctly rejected INFO  tests.suites.password.pwp_history_test:pwp_history_test.py:232 Password change correctly rejected INFO  tests.suites.password.pwp_history_test:pwp_history_test.py:254 Password change correctly rejected INFO  tests.suites.password.pwp_history_test:pwp_history_test.py:267 Configured passwordInHistory to 0. INFO  tests.suites.password.pwp_history_test:pwp_history_test.py:283 Password change correctly rejected INFO  tests.suites.password.pwp_history_test:pwp_history_test.py:299 Configured passwordInHistory to 2. INFO  tests.suites.password.pwp_history_test:pwp_history_test.py:312 Password change correctly rejected INFO  tests.suites.password.pwp_history_test:pwp_history_test.py:326 Test suite PASSED.
Passed suites/password/pwp_test.py::test_passwordchange_to_no 0.57
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/password/pwp_test.py::test_password_check_syntax 0.28
No log output captured.
Passed suites/password/pwp_test.py::test_too_big_password 0.62
No log output captured.
Passed suites/password/pwp_test.py::test_pwminage 3.12
No log output captured.
Passed suites/password/pwp_test.py::test_invalid_credentials 7.31
No log output captured.
Passed suites/password/pwp_test.py::test_expiration_date 1.22
No log output captured.
Passed suites/password/pwp_test.py::test_passwordlockout 2.40
No log output captured.
Passed suites/password/regression_of_bugs_test.py::test_local_password_policy 0.12
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/password/regression_of_bugs_test.py::test_passwordexpirationtime_attribute 3.30
No log output captured.
Passed suites/password/regression_of_bugs_test.py::test_admin_group_to_modify_password 0.99
No log output captured.
Passed suites/password/regression_of_bugs_test.py::test_password_max_failure_should_lockout_password 0.12
No log output captured.
Passed suites/password/regression_of_bugs_test.py::test_pwd_update_time_attribute 3.24
No log output captured.
Passed suites/password/regression_of_bugs_test.py::test_password_track_update_time 7.26
No log output captured.
Passed suites/password/regression_of_bugs_test.py::test_signal_11 0.05
No log output captured.
Passed suites/password/regression_test.py::test_pwp_local_unlock 4.09
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. INFO  tests.suites.password.regression_test:regression_test.py:68 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to on INFO  tests.suites.password.regression_test:regression_test.py:75 Configure subtree password policy for ou=people,dc=example,dc=com INFO  tests.suites.password.regression_test:regression_test.py:98 Adding user-uid=UIDpwtest1,ou=people,dc=example,dc=com
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.regression_test:regression_test.py:137 Verify user can bind... INFO  tests.suites.password.regression_test:regression_test.py:140 Test passwordUnlock default - user should be able to reset password after lockout INFO  tests.suites.password.regression_test:regression_test.py:151 Verify account is locked INFO  tests.suites.password.regression_test:regression_test.py:155 Wait for lockout duration... INFO  tests.suites.password.regression_test:regression_test.py:158 Check if user can now bind with correct password
Passed suites/password/regression_test.py::test_trivial_passw_check[CNpwtest1] 0.07
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.regression_test:regression_test.py:183 Replace userPassword attribute with CNpwtest1
Passed suites/password/regression_test.py::test_trivial_passw_check[SNpwtest1] 0.07
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.regression_test:regression_test.py:183 Replace userPassword attribute with SNpwtest1
Passed suites/password/regression_test.py::test_trivial_passw_check[UIDpwtest1] 0.08
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.regression_test:regression_test.py:183 Replace userPassword attribute with UIDpwtest1
Passed suites/password/regression_test.py::test_trivial_passw_check[MAILpwtest1@redhat.com] 0.06
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.regression_test:regression_test.py:183 Replace userPassword attribute with MAILpwtest1@redhat.com
Passed suites/password/regression_test.py::test_trivial_passw_check[GNpwtest1] 0.06
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.regression_test:regression_test.py:183 Replace userPassword attribute with GNpwtest1
Passed suites/password/regression_test.py::test_trivial_passw_check[CNpwtest1ZZZZ] 0.06
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.regression_test:regression_test.py:183 Replace userPassword attribute with CNpwtest1ZZZZ
Passed suites/password/regression_test.py::test_trivial_passw_check[ZZZZZCNpwtest1] 0.06
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.regression_test:regression_test.py:183 Replace userPassword attribute with ZZZZZCNpwtest1
Passed suites/password/regression_test.py::test_trivial_passw_check[ZCNpwtest1] 0.06
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.regression_test:regression_test.py:183 Replace userPassword attribute with ZCNpwtest1
Passed suites/password/regression_test.py::test_trivial_passw_check[CNpwtest1Z] 0.06
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.regression_test:regression_test.py:183 Replace userPassword attribute with CNpwtest1Z
Passed suites/password/regression_test.py::test_trivial_passw_check[ZCNpwtest1Z] 0.06
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.regression_test:regression_test.py:183 Replace userPassword attribute with ZCNpwtest1Z
Passed suites/password/regression_test.py::test_trivial_passw_check[ZZCNpwtest1] 0.06
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.regression_test:regression_test.py:183 Replace userPassword attribute with ZZCNpwtest1
Passed suites/password/regression_test.py::test_trivial_passw_check[CNpwtest1ZZ] 0.06
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.regression_test:regression_test.py:183 Replace userPassword attribute with CNpwtest1ZZ
Passed suites/password/regression_test.py::test_trivial_passw_check[ZZCNpwtest1ZZ] 0.06
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.regression_test:regression_test.py:183 Replace userPassword attribute with ZZCNpwtest1ZZ
Passed suites/password/regression_test.py::test_trivial_passw_check[ZZZCNpwtest1] 0.06
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.regression_test:regression_test.py:183 Replace userPassword attribute with ZZZCNpwtest1
Passed suites/password/regression_test.py::test_trivial_passw_check[CNpwtest1ZZZ] 0.06
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.regression_test:regression_test.py:183 Replace userPassword attribute with CNpwtest1ZZZ
Passed suites/password/regression_test.py::test_trivial_passw_check[ZZZCNpwtest1ZZZ] 0.07
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.regression_test:regression_test.py:183 Replace userPassword attribute with ZZZCNpwtest1ZZZ
Passed suites/password/regression_test.py::test_trivial_passw_check[ZZZZZZCNpwtest1ZZZZZZZZ] 0.09
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.regression_test:regression_test.py:183 Replace userPassword attribute with ZZZZZZCNpwtest1ZZZZZZZZ
Passed suites/password/regression_test.py::test_global_vs_local[CNpwtest1] 0.12
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.regression_test:regression_test.py:211 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to off INFO  tests.suites.password.regression_test:regression_test.py:216 Replace userPassword attribute with CNpwtest1
Passed suites/password/regression_test.py::test_global_vs_local[SNpwtest1] 0.12
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.regression_test:regression_test.py:211 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to off INFO  tests.suites.password.regression_test:regression_test.py:216 Replace userPassword attribute with SNpwtest1
Passed suites/password/regression_test.py::test_global_vs_local[UIDpwtest1] 0.34
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.regression_test:regression_test.py:211 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to off INFO  tests.suites.password.regression_test:regression_test.py:216 Replace userPassword attribute with UIDpwtest1
Passed suites/password/regression_test.py::test_global_vs_local[MAILpwtest1@redhat.com] 0.12
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.regression_test:regression_test.py:211 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to off INFO  tests.suites.password.regression_test:regression_test.py:216 Replace userPassword attribute with MAILpwtest1@redhat.com
Passed suites/password/regression_test.py::test_global_vs_local[GNpwtest1] 0.12
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.regression_test:regression_test.py:211 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to off INFO  tests.suites.password.regression_test:regression_test.py:216 Replace userPassword attribute with GNpwtest1
Passed suites/password/regression_test.py::test_global_vs_local[CNpwtest1ZZZZ] 0.12
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.regression_test:regression_test.py:211 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to off INFO  tests.suites.password.regression_test:regression_test.py:216 Replace userPassword attribute with CNpwtest1ZZZZ
Passed suites/password/regression_test.py::test_global_vs_local[ZZZZZCNpwtest1] 0.23
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.regression_test:regression_test.py:211 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to off INFO  tests.suites.password.regression_test:regression_test.py:216 Replace userPassword attribute with ZZZZZCNpwtest1
Passed suites/password/regression_test.py::test_global_vs_local[ZCNpwtest1] 0.12
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.regression_test:regression_test.py:211 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to off INFO  tests.suites.password.regression_test:regression_test.py:216 Replace userPassword attribute with ZCNpwtest1
Passed suites/password/regression_test.py::test_global_vs_local[CNpwtest1Z] 0.14
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.regression_test:regression_test.py:211 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to off INFO  tests.suites.password.regression_test:regression_test.py:216 Replace userPassword attribute with CNpwtest1Z
Passed suites/password/regression_test.py::test_global_vs_local[ZCNpwtest1Z] 0.12
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.regression_test:regression_test.py:211 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to off INFO  tests.suites.password.regression_test:regression_test.py:216 Replace userPassword attribute with ZCNpwtest1Z
Passed suites/password/regression_test.py::test_global_vs_local[ZZCNpwtest1] 0.12
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.regression_test:regression_test.py:211 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to off INFO  tests.suites.password.regression_test:regression_test.py:216 Replace userPassword attribute with ZZCNpwtest1
Passed suites/password/regression_test.py::test_global_vs_local[CNpwtest1ZZ] 0.12
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.regression_test:regression_test.py:211 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to off INFO  tests.suites.password.regression_test:regression_test.py:216 Replace userPassword attribute with CNpwtest1ZZ
Passed suites/password/regression_test.py::test_global_vs_local[ZZCNpwtest1ZZ] 0.12
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.regression_test:regression_test.py:211 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to off INFO  tests.suites.password.regression_test:regression_test.py:216 Replace userPassword attribute with ZZCNpwtest1ZZ
Passed suites/password/regression_test.py::test_global_vs_local[ZZZCNpwtest1] 0.12
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.regression_test:regression_test.py:211 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to off INFO  tests.suites.password.regression_test:regression_test.py:216 Replace userPassword attribute with ZZZCNpwtest1
Passed suites/password/regression_test.py::test_global_vs_local[CNpwtest1ZZZ] 0.12
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.regression_test:regression_test.py:211 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to off INFO  tests.suites.password.regression_test:regression_test.py:216 Replace userPassword attribute with CNpwtest1ZZZ
Passed suites/password/regression_test.py::test_global_vs_local[ZZZCNpwtest1ZZZ] 0.12
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.regression_test:regression_test.py:211 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to off INFO  tests.suites.password.regression_test:regression_test.py:216 Replace userPassword attribute with ZZZCNpwtest1ZZZ
Passed suites/password/regression_test.py::test_global_vs_local[ZZZZZZCNpwtest1ZZZZZZZZ] 0.12
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.regression_test:regression_test.py:211 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to off INFO  tests.suites.password.regression_test:regression_test.py:216 Replace userPassword attribute with ZZZZZZCNpwtest1ZZZZZZZZ
Passed suites/password/regression_test.py::test_unhashed_pw_switch 32.64
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology.
-------------------------------Captured log call--------------------------------
INFO  tests.suites.password.regression_test:regression_test.py:257 Enable plugins... INFO  tests.suites.password.regression_test:regression_test.py:272 create users and group... INFO  lib389:__init__.py:3014 Running script: ['/usr/bin/dbscan', '-f', '/var/lib/dirsrv/slapd-master1/db/userRoot/replication_changelog.db'] INFO  lib389:__init__.py:3014 Running script: ['/usr/bin/dbscan', '-f', '/var/lib/dirsrv/slapd-master1/db/userRoot/replication_changelog.db'] INFO  lib389:__init__.py:3014 Running script: ['/usr/bin/dbscan', '-f', '/var/lib/dirsrv/slapd-master1/db/userRoot/replication_changelog.db']
Passed suites/plugins/acceptance_test.py::test_acctpolicy 13.55
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38902, 'ldap-secureport': 63602, 'server-id': 'standalone2', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/plugins/acceptance_test.py::test_attruniq 17.66
No log output captured.
Passed suites/plugins/acceptance_test.py::test_automember 24.92
No log output captured.
Passed suites/plugins/acceptance_test.py::test_dna 17.23
No log output captured.
Passed suites/plugins/acceptance_test.py::test_linkedattrs 24.58
No log output captured.
Passed suites/plugins/acceptance_test.py::test_memberof 34.44
No log output captured.
Passed suites/plugins/acceptance_test.py::test_mep 17.79
No log output captured.
Passed suites/plugins/acceptance_test.py::test_passthru 22.22
No log output captured.
Passed suites/plugins/acceptance_test.py::test_referint 13.08
No log output captured.
Passed suites/plugins/acceptance_test.py::test_retrocl 22.46
No log output captured.
Passed suites/plugins/acceptance_test.py::test_rootdn 34.38
No log output captured.
Passed suites/plugins/accpol_test.py::test_glact_inact 28.01
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.utils:accpol_test.py:35 Configuring Global account policy plugin, pwpolicy attributes and restarting the server
-------------------------------Captured log call--------------------------------
INFO  lib389.utils:accpol_test.py:348 AccountInactivityLimit set to 12. Account will be inactivated if not accessed in 12 secs INFO  lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs INFO  lib389.utils:accpol_test.py:351 Sleep for 10 secs to check if account is not inactivated, expected value 0 INFO  lib389.utils:accpol_test.py:353 Account should not be inactivated since AccountInactivityLimit not exceeded INFO  lib389.utils:accpol_test.py:356 Sleep for 3 more secs to check if account is inactivated INFO  lib389.utils:accpol_test.py:360 Sleep +10 secs to check if account glinactusr3 is inactivated INFO  lib389.utils:accpol_test.py:290 del_users: Pass all of these as parameters suffix, subtree, userid and nousrs
Passed suites/plugins/accpol_test.py::test_glremv_lastlogin 19.11
-------------------------------Captured log call--------------------------------
INFO  lib389.utils:accpol_test.py:394 AccountInactivityLimit set to 12. Account will be inactivated if not accessed in 12 secs INFO  lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs INFO  lib389.utils:accpol_test.py:396 Sleep for 6 secs to check if account is not inactivated, expected value 0 INFO  lib389.utils:accpol_test.py:398 Account should not be inactivated since AccountInactivityLimit not exceeded INFO  lib389.utils:accpol_test.py:254 Delete lastLoginTime/createTimeStamp/ModifyTimeStamp attribute from user account INFO  lib389.utils:accpol_test.py:401 Sleep for 7 more secs to check if account is inactivated INFO  lib389.utils:accpol_test.py:219 Enable account by replacing lastLoginTime/createTimeStamp/ModifyTimeStamp attribute INFO  lib389.utils:accpol_test.py:405 Check if account is activated, expected 0 INFO  lib389.utils:accpol_test.py:290 del_users: Pass all of these as parameters suffix, subtree, userid and nousrs
Passed suites/plugins/accpol_test.py::test_glact_login 23.31
-------------------------------Captured log call--------------------------------
INFO  lib389.utils:accpol_test.py:434 AccountInactivityLimit set to 12. Account will be inactivated if not accessed in 12 secs INFO  lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs INFO  lib389.utils:accpol_test.py:436 Sleep for 13 secs to check if account is inactivated, expected error 19 INFO  lib389.utils:accpol_test.py:219 Enable account by replacing lastLoginTime/createTimeStamp/ModifyTimeStamp attribute INFO  lib389.utils:accpol_test.py:440 Check if account is activated, expected 0 INFO  lib389.utils:accpol_test.py:290 del_users: Pass all of these as parameters suffix, subtree, userid and nousrs
Passed suites/plugins/accpol_test.py::test_glinact_limit 122.47
-------------------------------Captured log call--------------------------------
INFO  lib389.utils:accpol_test.py:492 AccountInactivityLimit set to 12. Account will be inactivated if not accessed in 12 secs INFO  lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs INFO  lib389.utils:accpol_test.py:494 Sleep for 9 secs to check if account is not inactivated, expected 0 INFO  lib389.utils:accpol_test.py:189 Modify attribute value for a given DN INFO  lib389.utils:accpol_test.py:189 Modify attribute value for a given DN INFO  lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs INFO  lib389.utils:accpol_test.py:189 Modify attribute value for a given DN INFO  lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs INFO  lib389.utils:accpol_test.py:516 Check if account is activated, expected 0 INFO  lib389.utils:accpol_test.py:219 Enable account by replacing lastLoginTime/createTimeStamp/ModifyTimeStamp attribute INFO  lib389.utils:accpol_test.py:189 Modify attribute value for a given DN INFO  lib389.utils:accpol_test.py:290 del_users: Pass all of these as parameters suffix, subtree, userid and nousrs
Passed suites/plugins/accpol_test.py::test_glnologin_attr 83.67
-------------------------------Captured log call--------------------------------
INFO  lib389.utils:accpol_test.py:575 AccountInactivityLimit set to 12. Account will be inactivated if not accessed in 12 secs INFO  lib389.utils:accpol_test.py:576 Set attribute StateAttrName to createTimestamp, loginTime attr wont be considered INFO  lib389.utils:accpol_test.py:189 Modify attribute value for a given DN INFO  lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs INFO  lib389.utils:accpol_test.py:580 Sleep for 9 secs to check if account is not inactivated, expected 0 INFO  lib389.utils:accpol_test.py:189 Modify attribute value for a given DN INFO  lib389.utils:accpol_test.py:189 Modify attribute value for a given DN INFO  lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs INFO  lib389.utils:accpol_test.py:189 Modify attribute value for a given DN INFO  lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs INFO  lib389.utils:accpol_test.py:189 Modify attribute value for a given DN INFO  lib389.utils:accpol_test.py:605 Set attribute StateAttrName to lastLoginTime, the default INFO  lib389.utils:accpol_test.py:189 Modify attribute value for a given DN INFO  lib389.utils:accpol_test.py:219 Enable account by replacing lastLoginTime/createTimeStamp/ModifyTimeStamp attribute INFO  lib389.utils:accpol_test.py:609 Check if account is activated, expected 0 INFO  lib389.utils:accpol_test.py:290 del_users: Pass all of these as parameters suffix, subtree, userid and nousrs
Passed suites/plugins/accpol_test.py::test_glnoalt_stattr 56.60
-------------------------------Captured log call--------------------------------
INFO  lib389.utils:accpol_test.py:640 Set attribute altStateAttrName to 1.1 INFO  lib389.utils:accpol_test.py:189 Modify attribute value for a given DN INFO  lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs INFO  lib389.utils:accpol_test.py:644 Sleep for 13 secs to check if account is not inactivated, expected 0 INFO  lib389.utils:accpol_test.py:647 lastLoginTime attribute is added from the above ldap bind by userdn INFO  lib389.utils:accpol_test.py:254 Delete lastLoginTime/createTimeStamp/ModifyTimeStamp attribute from user account INFO  lib389.utils:accpol_test.py:189 Modify attribute value for a given DN INFO  lib389.utils:accpol_test.py:219 Enable account by replacing lastLoginTime/createTimeStamp/ModifyTimeStamp attribute INFO  lib389.utils:accpol_test.py:290 del_users: Pass all of these as parameters suffix, subtree, userid and nousrs
Passed suites/plugins/accpol_test.py::test_glattr_modtime 44.83
-------------------------------Captured log call--------------------------------
INFO  lib389.utils:accpol_test.py:687 Set attribute altStateAttrName to modifyTimestamp INFO  lib389.utils:accpol_test.py:189 Modify attribute value for a given DN INFO  lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs INFO  lib389.utils:accpol_test.py:691 Sleep for 13 secs to check if account is inactivated, expected 0 INFO  lib389.utils:accpol_test.py:202 Check ModifyTimeStamp attribute present for user INFO  lib389.utils:accpol_test.py:237 Enable account by replacing cn attribute value, value of modifyTimeStamp changed INFO  lib389.utils:accpol_test.py:189 Modify attribute value for a given DN INFO  lib389.utils:accpol_test.py:254 Delete lastLoginTime/createTimeStamp/ModifyTimeStamp attribute from user account INFO  lib389.utils:accpol_test.py:219 Enable account by replacing lastLoginTime/createTimeStamp/ModifyTimeStamp attribute INFO  lib389.utils:accpol_test.py:290 del_users: Pass all of these as parameters suffix, subtree, userid and nousrs
Passed suites/plugins/accpol_test.py::test_glnoalt_nologin 50.78
-------------------------------Captured log call--------------------------------
INFO  lib389.utils:accpol_test.py:742 Set attribute altStateAttrName to 1.1 INFO  lib389.utils:accpol_test.py:189 Modify attribute value for a given DN INFO  lib389.utils:accpol_test.py:744 Set attribute alwaysrecordlogin to No INFO  lib389.utils:accpol_test.py:189 Modify attribute value for a given DN INFO  lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs INFO  lib389.utils:accpol_test.py:748 Sleep for 13 secs to check if account is not inactivated, expected 0 INFO  lib389.utils:accpol_test.py:753 Set attribute altStateAttrName to createTimestamp INFO  lib389.utils:accpol_test.py:189 Modify attribute value for a given DN INFO  lib389.utils:accpol_test.py:758 Reset the default attribute values INFO  lib389.utils:accpol_test.py:189 Modify attribute value for a given DN INFO  lib389.utils:accpol_test.py:219 Enable account by replacing lastLoginTime/createTimeStamp/ModifyTimeStamp attribute INFO  lib389.utils:accpol_test.py:290 del_users: Pass all of these as parameters suffix, subtree, userid and nousrs
Passed suites/plugins/accpol_test.py::test_glinact_nsact 22.83
-------------------------------Captured log call--------------------------------
INFO  lib389.utils:accpol_test.py:799 AccountInactivityLimit set to 12. Account will be inactivated if not accessed in 12 secs INFO  lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs INFO  lib389.utils:accpol_test.py:801 Sleep for 3 secs to check if account is not inactivated, expected value 0 INFO  lib389.utils:accpol_test.py:162 Account activate/in-activate/status using dsidm INFO  lib389.utils:accpol_test.py:166 Running unlock for user uid=nsactusr1,ou=groups,dc=example,dc=com INFO  lib389.utils:accpol_test.py:174 Running ['/usr/sbin/dsidm', 'slapd-standalone1', '-b', 'dc=example,dc=com', 'account', 'unlock', 'uid=nsactusr1,ou=groups,dc=example,dc=com'] for user uid=nsactusr1,ou=groups,dc=example,dc=com INFO  lib389.utils:accpol_test.py:180 output: b'Error: Account is already active\n' INFO  lib389.utils:accpol_test.py:804 Sleep for 10 secs to check if account is inactivated, expected value 19 INFO  lib389.utils:accpol_test.py:162 Account activate/in-activate/status using dsidm INFO  lib389.utils:accpol_test.py:166 Running unlock for user uid=nsactusr1,ou=groups,dc=example,dc=com INFO  lib389.utils:accpol_test.py:174 Running ['/usr/sbin/dsidm', 'slapd-standalone1', '-b', 'dc=example,dc=com', 'account', 'unlock', 'uid=nsactusr1,ou=groups,dc=example,dc=com'] for user uid=nsactusr1,ou=groups,dc=example,dc=com INFO  lib389.utils:accpol_test.py:180 output: b'Error: 103 - 22 - 16 - No such attribute - []\n' INFO  lib389.utils:accpol_test.py:162 Account activate/in-activate/status using dsidm INFO  lib389.utils:accpol_test.py:166 Running entry-status for user uid=nsactusr1,ou=groups,dc=example,dc=com INFO  lib389.utils:accpol_test.py:174 Running ['/usr/sbin/dsidm', 'slapd-standalone1', '-b', 'dc=example,dc=com', 'account', 'entry-status', 'uid=nsactusr1,ou=groups,dc=example,dc=com'] for user uid=nsactusr1,ou=groups,dc=example,dc=com INFO  lib389.utils:accpol_test.py:180 output: b'Entry DN: 
uid=nsactusr1,ou=groups,dc=example,dc=com\nEntry Creation Date: 20201029021429Z (2020-10-29 02:14:29)\nEntry Modification Date: 20201029021429Z (2020-10-29 02:14:29)\nEntry Last Login Date: 20201029021429Z (2020-10-29 02:14:29)\nEntry Time Since Inactive: 5 seconds (2020-10-29 03:14:41)\nEntry State: inactivity limit exceeded\n\n' INFO  lib389.utils:accpol_test.py:219 Enable account by replacing lastLoginTime/createTimeStamp/ModifyTimeStamp attribute INFO  lib389.utils:accpol_test.py:162 Account activate/in-activate/status using dsidm INFO  lib389.utils:accpol_test.py:166 Running entry-status for user uid=nsactusr1,ou=groups,dc=example,dc=com INFO  lib389.utils:accpol_test.py:174 Running ['/usr/sbin/dsidm', 'slapd-standalone1', '-b', 'dc=example,dc=com', 'account', 'entry-status', 'uid=nsactusr1,ou=groups,dc=example,dc=com'] for user uid=nsactusr1,ou=groups,dc=example,dc=com INFO  lib389.utils:accpol_test.py:180 output: b'Entry DN: uid=nsactusr1,ou=groups,dc=example,dc=com\nEntry Creation Date: 20201029021429Z (2020-10-29 02:14:29)\nEntry Modification Date: 20201029021447Z (2020-10-29 02:14:47)\nEntry Last Login Date: 20201029021449Z (2020-10-29 02:14:49)\nEntry Time Until Inactive: 11 seconds (2020-10-29 03:15:01)\nEntry State: activated\n\n' INFO  lib389.utils:accpol_test.py:290 del_users: Pass all of these as parameters suffix, subtree, userid and nousrs
Passed suites/plugins/accpol_test.py::test_glinact_acclock 40.32
-------------------------------Captured log call--------------------------------
INFO  lib389.utils:accpol_test.py:844 AccountInactivityLimit set to 12. Account will be inactivated if not accessed in 12 secs INFO  lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs INFO  lib389.utils:accpol_test.py:846 Sleep for 3 secs and try invalid binds to lockout the user INFO  lib389.utils:accpol_test.py:118 Lockout user account by attempting invalid password binds INFO  lib389.utils:accpol_test.py:850 Sleep for 10 secs to check if account is inactivated, expected value 19 INFO  lib389.utils:accpol_test.py:854 Add lastLoginTime to activate the user account INFO  lib389.utils:accpol_test.py:219 Enable account by replacing lastLoginTime/createTimeStamp/ModifyTimeStamp attribute INFO  lib389.utils:accpol_test.py:858 Checking if account is unlocked after passwordlockoutduration, but inactivated after accountInactivityLimit INFO  lib389.utils:accpol_test.py:118 Lockout user account by attempting invalid password binds INFO  lib389.utils:accpol_test.py:862 Account is expected to be unlocked after 5 secs of passwordlockoutduration INFO  lib389.utils:accpol_test.py:866 Sleep 13s and check if account inactivated based on accountInactivityLimit, expected 19 INFO  lib389.utils:accpol_test.py:290 del_users: Pass all of these as parameters suffix, subtree, userid and nousrs
Passed suites/plugins/accpol_test.py::test_glnact_pwexp 49.34
-------------------------------Captured log call--------------------------------
INFO  lib389.utils:accpol_test.py:909 AccountInactivityLimit set to 12. Account will be inactivated if not accessed in 12 secs INFO  lib389.utils:accpol_test.py:910 Passwordmaxage is set to 9. Password will expire in 9 secs INFO  lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs INFO  lib389.utils:accpol_test.py:913 Sleep for 9 secs and check if password expired INFO  lib389.utils:accpol_test.py:919 Add lastLoginTime to activate the user account INFO  lib389.utils:accpol_test.py:219 Enable account by replacing lastLoginTime/createTimeStamp/ModifyTimeStamp attribute INFO  lib389.utils:accpol_test.py:141 Reset user password for user-uid=pwexpusr1,ou=groups,dc=example,dc=com INFO  lib389.utils:accpol_test.py:141 Reset user password for user-uid=pwexpusr1,ou=groups,dc=example,dc=com INFO  lib389.utils:accpol_test.py:930 Sleep for 4 secs and check if account is now inactivated, expected error 19 INFO  lib389.utils:accpol_test.py:141 Reset user password for user-uid=pwexpusr1,ou=groups,dc=example,dc=com INFO  lib389.utils:accpol_test.py:219 Enable account by replacing lastLoginTime/createTimeStamp/ModifyTimeStamp attribute INFO  lib389.utils:accpol_test.py:141 Reset user password for user-uid=pwexpusr1,ou=groups,dc=example,dc=com INFO  lib389.utils:accpol_test.py:290 del_users: Pass all of these as parameters suffix, subtree, userid and nousrs
Passed suites/plugins/accpol_test.py::test_locact_inact 31.34
-------------------------------Captured log setup-------------------------------
INFO  lib389.utils:accpol_test.py:80 Adding Local account policy plugin configuration entries
-------------------------------Captured log call--------------------------------
INFO  lib389.utils:accpol_test.py:981 AccountInactivityLimit set to 10. Account will be inactivated if not accessed in 10 secs INFO  lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs INFO  lib389.utils:accpol_test.py:983 Sleep for 9 secs to check if account is not inactivated, expected value 0 INFO  lib389.utils:accpol_test.py:985 Account should not be inactivated since AccountInactivityLimit not exceeded INFO  lib389.utils:accpol_test.py:987 Sleep for 2 more secs to check if account is inactivated INFO  lib389.utils:accpol_test.py:990 Sleep +9 secs to check if account inactusr3 is inactivated INFO  lib389.utils:accpol_test.py:993 Add lastLoginTime attribute to all users and check if its activated INFO  lib389.utils:accpol_test.py:219 Enable account by replacing lastLoginTime/createTimeStamp/ModifyTimeStamp attribute INFO  lib389.utils:accpol_test.py:290 del_users: Pass all of these as parameters suffix, subtree, userid and nousrs
Passed suites/plugins/accpol_test.py::test_locinact_modrdn 27.12
-------------------------------Captured log call--------------------------------
INFO  lib389.utils:accpol_test.py:1027 Account should not be inactivated since the subtree is not configured INFO  lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs INFO  lib389.utils:accpol_test.py:1029 Sleep for 11 secs to check if account is not inactivated, expected value 0 INFO  lib389.utils:accpol_test.py:1032 Moving users from ou=groups to ou=people subtree INFO  lib389.utils:accpol_test.py:1040 Then wait for 11 secs and check if entries are inactivated INFO  lib389.utils:accpol_test.py:219 Enable account by replacing lastLoginTime/createTimeStamp/ModifyTimeStamp attribute INFO  lib389.utils:accpol_test.py:290 del_users: Pass all of these as parameters suffix, subtree, userid and nousrs
Passed suites/plugins/accpol_test.py::test_locact_modrdn 15.08
-------------------------------Captured log call--------------------------------
INFO  lib389.utils:accpol_test.py:1072 Account should be inactivated since the subtree is configured INFO  lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs INFO  lib389.utils:accpol_test.py:1074 Sleep for 11 secs to check if account is inactivated, expected value 19 INFO  lib389.utils:accpol_test.py:1077 Moving users from ou=people to ou=groups subtree INFO  lib389.utils:accpol_test.py:1084 Sleep for +2 secs and check users from both ou=people and ou=groups subtree INFO  lib389.utils:accpol_test.py:290 del_users: Pass all of these as parameters suffix, subtree, userid and nousrs
Passed suites/plugins/attr_nsslapd-pluginarg_test.py::test_duplicate_values 3.42
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. INFO  tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:31 Ticket 47431 - 0: Enable 7bit plugin...
-------------------------------Captured log call--------------------------------
INFO  tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:56 Ticket 47431 - 1: Check 26 duplicate values are treated as one... DEBUG  tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:59 modify_s cn=7-bit check,cn=plugins,cn=config DEBUG  tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:83 line: [28/Oct/2020:22:18:03.098597266 -0400] - WARN - str2entry_dupcheck - 26 duplicate values for attribute type nsslapd-pluginarg2 detected in entry cn=7-bit check,cn=plugins,cn=config. Extra values ignored. INFO  tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:84 Expected error "str2entry_dupcheck.* duplicate values for attribute type nsslapd-pluginarg2 detected in entry cn=7-bit check,cn=plugins,cn=config." logged in /var/log/dirsrv/slapd-standalone1/errors INFO  tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:86 Ticket 47431 - 1: done
Passed suites/plugins/attr_nsslapd-pluginarg_test.py::test_multiple_value 5.92
-------------------------------Captured log call--------------------------------
INFO  tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:111 Ticket 47431 - 2: Check two values belonging to one arg is fixed... DEBUG  tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:131 line - [28/Oct/2020:22:18:07.761562918 -0400] - DEBUG - NS7bitAttr - NS7bitAttr_Init - 0: uid DEBUG  tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:132 ATTRS[0] uid DEBUG  tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:136 uid was logged DEBUG  tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:131 line - [28/Oct/2020:22:18:07.766796733 -0400] - DEBUG - NS7bitAttr - NS7bitAttr_Init - 1: mail DEBUG  tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:132 ATTRS[1] mail DEBUG  tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:136 mail was logged DEBUG  tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:131 line - [28/Oct/2020:22:18:07.771778431 -0400] - DEBUG - NS7bitAttr - NS7bitAttr_Init - 2: userpassword DEBUG  tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:132 ATTRS[2] userpassword DEBUG  tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:136 userpassword was logged DEBUG  tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:131 line - [28/Oct/2020:22:18:07.775197910 -0400] - DEBUG - NS7bitAttr - NS7bitAttr_Init - 3: , DEBUG  tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:132 ATTRS[3] , DEBUG  tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:136 , was logged DEBUG  tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:131 line - [28/Oct/2020:22:18:07.778029416 -0400] - DEBUG - NS7bitAttr - NS7bitAttr_Init - 4: dc=example,dc=com DEBUG  
tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:132 ATTRS[4] dc=example,dc=com DEBUG  tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:136 dc=example,dc=com was logged INFO  tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:142 Ticket 47431 - 2: done
Passed suites/plugins/attr_nsslapd-pluginarg_test.py::test_missing_args 4.23
-------------------------------Captured log call--------------------------------
INFO  tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:170 Ticket 47431 - 3: Check missing args are fixed... DEBUG  tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:197 uid was logged DEBUG  tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:197 mail was logged DEBUG  tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:197 userpassword was logged DEBUG  tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:197 , was logged DEBUG  tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:197 dc=example,dc=com was logged INFO  tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:203 Ticket 47431 - 3: done INFO  tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:204 Test complete
Passed suites/plugins/cos_test.py::test_cos_operational_default 4.53
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
CRITICAL lib389.utils:cos_test.py:27 Adding user (uid=user_0,ou=people,dc=example,dc=com): INFO  lib389.utils:cos_test.py:153 Returned telephonenumber (exp. real): b'1234 is real' INFO  lib389.utils:cos_test.py:154 Returned telephonenumber: 8 INFO  lib389.utils:cos_test.py:160 Returned l (exp. real): b'here is real' INFO  lib389.utils:cos_test.py:161 Returned l: 8 INFO  lib389.utils:cos_test.py:170 Returned seealso (exp. virtual): b'dc=virtual,dc=example,dc=com' INFO  lib389.utils:cos_test.py:171 Returned seealso: 3 INFO  lib389.utils:cos_test.py:180 Returned description (exp. virtual): b'desc is virtual' INFO  lib389.utils:cos_test.py:181 Returned description: 8 INFO  lib389.utils:cos_test.py:191 Returned title (exp. real): b'title is real' INFO  lib389.utils:cos_test.py:212 Returned title(exp. virt): b'title is virtual 1' INFO  lib389.utils:cos_test.py:212 Returned title(exp. virt): b'title is virtual 0'
Passed suites/plugins/deref_aci_test.py::test_deref_and_access_control 0.78
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.suites.plugins.deref_aci_test:deref_aci_test.py:133 Check, that the dereference search result does not have userpassword
Passed suites/plugins/dna_test.py::test_dnatype_only_valid 4.71
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/plugins/entryusn_test.py::test_entryusn_no_duplicates 5.47
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/plugins/entryusn_test.py::test_entryusn_is_same_after_failure 5.46
No log output captured.
Passed suites/plugins/entryusn_test.py::test_entryusn_after_repl_delete 4.32
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect ad764e5a-d227-4ecd-99f5-4f49311ba0c4 / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working 
(expect a652a00d-65de-40f4-8faa-23163dddb392 / got description=ad764e5a-d227-4ecd-99f5-4f49311ba0c4) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists
Passed suites/plugins/managed_entry_test.py::test_binddn_tracking 2.32
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/plugins/managed_entry_test.py::test_mentry01 8.94
No log output captured.
Passed suites/plugins/managed_entry_test.py::test_managed_entry_removal 4.40
No log output captured.
Passed suites/plugins/memberof_test.py::test_betxnpostoperation_replace 4.44
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/plugins/memberof_test.py::test_memberofgroupattr_add 0.01
No log output captured.
Passed suites/plugins/memberof_test.py::test_enable 4.41
-------------------------------Captured log call--------------------------------
INFO  tests.suites.plugins.memberof_test:memberof_test.py:181 Enable MemberOf plugin
Passed suites/plugins/memberof_test.py::test_member_add 0.34
-------------------------------Captured log call--------------------------------
INFO  tests.suites.plugins.memberof_test:memberof_test.py:67 Create user uid=user_memofenh1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:67 Create user uid=user_memofenh2,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:211 Update b'uid=user_memofenh1,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp1,ou=groups,dc=example,dc=com' (member) INFO  tests.suites.plugins.memberof_test:memberof_test.py:212 Update b'uid=user_memofenh2,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp1,ou=groups,dc=example,dc=com' (uniqueMember) INFO  tests.suites.plugins.memberof_test:memberof_test.py:215 Update b'uid=user_memofenh1,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp2,ou=groups,dc=example,dc=com' (member) INFO  tests.suites.plugins.memberof_test:memberof_test.py:216 Update b'uid=user_memofenh2,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp2,ou=groups,dc=example,dc=com' (uniqueMember) INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  
tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
Passed suites/plugins/memberof_test.py::test_member_delete_gr1 0.01
-------------------------------Captured log call--------------------------------
INFO  tests.suites.plugins.memberof_test:memberof_test.py:246 Update b'uid=user_memofenh1,ou=people,dc=example,dc=com' is no longer memberof b'cn=group_memofegrp1,ou=groups,dc=example,dc=com' (member) INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
Passed suites/plugins/memberof_test.py::test_member_delete_gr2 0.01
-------------------------------Captured log call--------------------------------
INFO  tests.suites.plugins.memberof_test:memberof_test.py:278 Update b'uid=user_memofenh1,ou=people,dc=example,dc=com' is no longer memberof b'cn=group_memofegrp1,ou=groups,dc=example,dc=com' (uniqueMember) INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
Passed suites/plugins/memberof_test.py::test_member_delete_all 0.02
-------------------------------Captured log call--------------------------------
INFO  tests.suites.plugins.memberof_test:memberof_test.py:310 Update b'uid=user_memofenh2,ou=people,dc=example,dc=com' is no longer memberof b'cn=group_memofegrp1,ou=groups,dc=example,dc=com' (uniqueMember) INFO  tests.suites.plugins.memberof_test:memberof_test.py:314 Update b'uid=user_memofenh1,ou=people,dc=example,dc=com' is no longer memberof b'cn=group_memofegrp2,ou=groups,dc=example,dc=com' (member) INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
Passed suites/plugins/memberof_test.py::test_member_after_restart 8.49
-------------------------------Captured log call--------------------------------
INFO  tests.suites.plugins.memberof_test:memberof_test.py:349 Update b'uid=user_memofenh1,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp1,ou=groups,dc=example,dc=com' (member) INFO  tests.suites.plugins.memberof_test:memberof_test.py:353 Update b'uid=user_memofenh2,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp2,ou=groups,dc=example,dc=com' (uniqueMember) INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:364 Remove uniqueMember as a memberofgrpattr INFO  tests.suites.plugins.memberof_test:memberof_test.py:371 Assert that this change of configuration did change the already set values INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
Passed suites/plugins/memberof_test.py::test_memberofgroupattr_uid 0.00
-------------------------------Captured log call--------------------------------
ERROR  tests.suites.plugins.memberof_test:memberof_test.py:400 Setting 'memberUid' as memberofgroupattr is rejected (expected)
Passed suites/plugins/memberof_test.py::test_member_add_duplicate_usr1 0.18
-------------------------------Captured log call--------------------------------
INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:422 Try b'uid=user_memofenh1,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp1,ou=groups,dc=example,dc=com' (member) ERROR  tests.suites.plugins.memberof_test:memberof_test.py:429 b'uid=user_memofenh1,ou=people,dc=example,dc=com' already member of b'cn=group_memofegrp1,ou=groups,dc=example,dc=com' --> fail (expected)
Passed suites/plugins/memberof_test.py::test_member_add_duplicate_usr2 0.02
-------------------------------Captured log call--------------------------------
INFO  tests.suites.plugins.memberof_test:memberof_test.py:450 Check initial status INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:460 Try b'uid=user_memofenh2,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp2,ou=groups,dc=example,dc=com' (member) ERROR  tests.suites.plugins.memberof_test:memberof_test.py:467 b'uid=user_memofenh2,ou=people,dc=example,dc=com' already member of b'cn=group_memofegrp2,ou=groups,dc=example,dc=com' --> fail (expected) INFO  tests.suites.plugins.memberof_test:memberof_test.py:470 Check final status INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  
tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
Passed suites/plugins/memberof_test.py::test_member_uniquemember_same_user 0.07
-------------------------------Captured log call--------------------------------
INFO  tests.suites.plugins.memberof_test:memberof_test.py:557 Check initial status INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:569 Update b'uid=user_memofenh1,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp3,ou=groups,dc=example,dc=com' (member) INFO  tests.suites.plugins.memberof_test:memberof_test.py:570 Update b'uid=user_memofenh1,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp3,ou=groups,dc=example,dc=com' (uniqueMember) INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:582 Update b'uid=user_memofenh1,ou=people,dc=example,dc=com' is not memberof b'cn=group_memofegrp3,ou=groups,dc=example,dc=com' (member) INFO  tests.suites.plugins.memberof_test:memberof_test.py:586 Update b'uid=user_memofenh2,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp3,ou=groups,dc=example,dc=com' (member) INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:613 Checking final status INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com 
INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
Passed suites/plugins/memberof_test.py::test_member_not_exists 0.03
-------------------------------Captured log call--------------------------------
INFO  tests.suites.plugins.memberof_test:memberof_test.py:671 Checking Initial status INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  
tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp015,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:691 Update b'uid=user_dummy1,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp015,ou=groups,dc=example,dc=com' (member) INFO  tests.suites.plugins.memberof_test:memberof_test.py:692 Update b'uid=user_dummy2,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp015,ou=groups,dc=example,dc=com' (uniqueMember) INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 
memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
Passed suites/plugins/memberof_test.py::test_member_not_exists_complex 0.05
-------------------------------Captured log call--------------------------------
INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from 
b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:806 Update b'uid=user_memofenh1,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp016,ou=groups,dc=example,dc=com' (member) INFO  tests.suites.plugins.memberof_test:memberof_test.py:807 Update b'uid=user_memofenh1,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp016,ou=groups,dc=example,dc=com' (uniqueMember) INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  
tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 
--> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  
tests.suites.plugins.memberof_test:memberof_test.py:835 Update b'uid=user_dummy1,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp016,ou=groups,dc=example,dc=com' (member) INFO  tests.suites.plugins.memberof_test:memberof_test.py:845 Update b'uid=user_dummy1,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp016,ou=groups,dc=example,dc=com' (uniqueMember) INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof 
from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
Passed suites/plugins/memberof_test.py::test_complex_group_scenario_1 0.06
-------------------------------Captured log call--------------------------------
INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof 
from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:67 Create user uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:67 Create user uid=user_memofuser2,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:67 Create user uid=user_memofuser3,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp017,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:1011 Update b'uid=user_memofuser1,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp017,ou=groups,dc=example,dc=com' (member) INFO  tests.suites.plugins.memberof_test:memberof_test.py:1012 Update b'uid=user_memofuser2,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp017,ou=groups,dc=example,dc=com' (uniqueMember) INFO  tests.suites.plugins.memberof_test:memberof_test.py:1013 Update b'uid=user_memofuser3,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp017,ou=groups,dc=example,dc=com' (memberuid) INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp017,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from 
b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com'
Passed suites/plugins/memberof_test.py::test_complex_group_scenario_2 0.14
-------------------------------Captured log call--------------------------------
INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof 
from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com INFO  
tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp018,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:1261 Update b'uid=user_memofuser1,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp017,ou=groups,dc=example,dc=com' (member) INFO  tests.suites.plugins.memberof_test:memberof_test.py:1262 Update b'uid=user_memofuser1,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp017,ou=groups,dc=example,dc=com' (uniqueMember) INFO  tests.suites.plugins.memberof_test:memberof_test.py:1263 Update b'uid=user_memofuser1,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp017,ou=groups,dc=example,dc=com' (memberuid) INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp018,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp018,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp018,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp018,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp018,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp017,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp018,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:1283 Update b'uid=user_memofuser1,ou=people,dc=example,dc=com' is no longer memberof b'cn=group_memofegrp018,ou=groups,dc=example,dc=com' (member) INFO  tests.suites.plugins.memberof_test:memberof_test.py:1284 Update b'uid=user_memofuser1,ou=people,dc=example,dc=com' is no longer memberof b'cn=group_memofegrp018,ou=groups,dc=example,dc=com' (uniqueMember) INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp017,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
Passed suites/plugins/memberof_test.py::test_complex_group_scenario_3 0.19
-------------------------------Captured log call--------------------------------
INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  
tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:67 Create user uid=user_memofuser2,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:67 Create user uid=user_memofuser3,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp019_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp019_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_1,ou=groups,dc=example,dc=com' INFO  
tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_1,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_1,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_1,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_1,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_1,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_1,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_1,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_1,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from 
b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' INFO  
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO  
tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO  
tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from 
b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  
tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  
tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
Passed suites/plugins/memberof_test.py::test_complex_group_scenario_4 0.17
-------------------------------Captured log call--------------------------------
INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  
tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:67 Create user uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  
tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
Passed suites/plugins/memberof_test.py::test_complex_group_scenario_5 0.16
-------------------------------Captured log call--------------------------------
INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  
tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:67 Create user uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:67 Create user uid=user_memofuser2,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:67 Create user uid=user_memofuser3,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:67 Create user uid=user_memofuser4,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:83 
Create group cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check 
b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser2,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser3,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser4,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check 
b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: 
cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: 
cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: 
cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
Passed suites/plugins/memberof_test.py::test_complex_group_scenario_6 5.44
-------------------------------Captured log call--------------------------------
INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from 
b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from 
b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser2,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check 
b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser3,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser4,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = 
b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  
tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  
tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  
tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser2,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser2,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser3,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser3,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser4,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.member = 
b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser4,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from 
b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  
tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  
tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from 
b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from 
b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified 
INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  
tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  
tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
Passed suites/plugins/memberof_test.py::test_complex_group_scenario_7 0.11
-------------------------------Captured log call--------------------------------
INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 
--> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 
memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup 
memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from 
b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup 
memberof from b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from 
b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from 
b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  
tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  
tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from 
b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser2,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser2,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check 
b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser3,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser3,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser4,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser4,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' INFO  
tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  
tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  
tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  
tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser2,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser2,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser3,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser3,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser4,ou=people,dc=example,dc=com INFO  
tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser4,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser2,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser2,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser3,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser3,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser4,ou=people,dc=example,dc=com INFO  
tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser4,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  
tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 
--> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  
tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  
tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com 
INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  
tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> 
membership verified
Passed suites/plugins/memberof_test.py::test_complex_group_scenario_8 0.06
-------------------------------Captured log call--------------------------------
INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: 
cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser2,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser2,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  
tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser3,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser3,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser4,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser4,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser2,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  
tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser2,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser3,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser3,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser4,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser4,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.uniqueMember = 
b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> 
membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 
memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  
tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: 
uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: 
cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser2,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser2,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser3,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser3,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.member = 
b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser4,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser4,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser2,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser2,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.uniqueMember = 
b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser3,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser3,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser4,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser4,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  
tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  
tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO  
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership 
verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
Passed suites/plugins/memberof_test.py::test_complex_group_scenario_9 0.14
-------------------------------Captured log call--------------------------------
INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: 
cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser2,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser2,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  
tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser3,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser3,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser4,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser4,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  
tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser2,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser2,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser3,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser3,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser4,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser4,ou=people,dc=example,dc=com INFO  
tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership 
verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from 
b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 
memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  
tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  
tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = 
b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO  
tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO  
tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> 
membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from 
b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com'
Passed suites/plugins/memberof_test.py::test_memberof_auto_add_oc 0.14
-------------------------------Captured log call--------------------------------
INFO  tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from uid=user1,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group,dc=example,dc=com INFO  tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO  tests.suites.plugins.memberof_test:memberof_test.py:2753 Correctly rejected invalid objectclass INFO  tests.suites.plugins.memberof_test:memberof_test.py:2823 Test complete.
Passed suites/plugins/pluginpath_validation_test.py::test_pluginpath_validation 0.56
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.suites.plugins.pluginpath_validation_test:pluginpath_validation_test.py:103 Test complete
Passed suites/plugins/referint_test.py::test_referential_false_failure 7.11
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo0,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo1,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo2,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo3,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo4,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo5,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo6,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo7,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo8,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo9,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo10,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo11,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo12,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo13,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo14,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo15,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo16,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo17,dc=example,dc=com): CRITICAL 
tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo18,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo19,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo20,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo21,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo22,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo23,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo24,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo25,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo26,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo27,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo28,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo29,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo30,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo31,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo32,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo33,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo34,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo35,dc=example,dc=com): CRITICAL 
tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo36,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo37,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo38,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo39,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo40,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo41,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo42,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo43,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo44,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo45,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo46,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo47,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo48,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo49,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo50,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo51,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo52,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo53,dc=example,dc=com): CRITICAL 
tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo54,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo55,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo56,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo57,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo58,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo59,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo60,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo61,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo62,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo63,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo64,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo65,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo66,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo67,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo68,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo69,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo70,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo71,dc=example,dc=com): CRITICAL 
tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo72,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo73,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo74,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo75,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo76,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo77,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo78,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo79,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo80,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo81,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo82,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo83,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo84,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo85,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo86,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo87,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo88,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo89,dc=example,dc=com): CRITICAL 
tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo90,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo91,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo92,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo93,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo94,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo95,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo96,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo97,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo98,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo99,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo100,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo101,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo102,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo103,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo104,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo105,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo106,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo107,dc=example,dc=com): 
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo108,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo109,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo110,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo111,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo112,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo113,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo114,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo115,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo116,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo117,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo118,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo119,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo120,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo121,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo122,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo123,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo124,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user 
(uid=foo\,oo125,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo126,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo127,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo128,dc=example,dc=com): CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo129,dc=example,dc=com):
Passed suites/plugins/rootdn_plugin_test.py::test_rootdn_access_specific_time 0.78
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. INFO  tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:60 Initializing root DN test suite... INFO  tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:70 test_rootdn_init: Initialized root DN test suite. INFO  tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:36 Cleaning up the config area
-------------------------------Captured log call--------------------------------
INFO  tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:102 Running test_rootdn_access_specific_time...
Passed suites/plugins/rootdn_plugin_test.py::test_rootdn_access_day_of_week 0.03
-------------------------------Captured log setup-------------------------------
INFO  tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:36 Cleaning up the config area
-------------------------------Captured log call--------------------------------
INFO  tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:185 Running test_rootdn_access_day_of_week... INFO  tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:202 Today: Thu INFO  tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:203 Allowed days: Thu,Wed INFO  tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:204 Deny days: Sun, Mon
Passed suites/plugins/rootdn_plugin_test.py::test_rootdn_access_denied_ip 0.03
-------------------------------Captured log setup-------------------------------
INFO  tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:36 Cleaning up the config area
-------------------------------Captured log call--------------------------------
INFO  tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:268 Running test_rootdn_access_denied_ip...
Passed suites/plugins/rootdn_plugin_test.py::test_rootdn_access_denied_host 2.57
-------------------------------Captured log setup-------------------------------
INFO  tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:36 Cleaning up the config area
-------------------------------Captured log call--------------------------------
INFO  tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:333 Running test_rootdn_access_denied_host...
Passed suites/plugins/rootdn_plugin_test.py::test_rootdn_access_allowed_ip 0.03
-------------------------------Captured log setup-------------------------------
INFO  tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:36 Cleaning up the config area
-------------------------------Captured log call--------------------------------
INFO  tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:400 Running test_rootdn_access_allowed_ip...
Passed suites/plugins/rootdn_plugin_test.py::test_rootdn_access_allowed_host 2.55
-------------------------------Captured log setup-------------------------------
INFO  tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:36 Cleaning up the config area
-------------------------------Captured log call--------------------------------
INFO  tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:466 Running test_rootdn_access_allowed_host...
Passed suites/plugins/rootdn_plugin_test.py::test_rootdn_config_validate 0.02
-------------------------------Captured log setup-------------------------------
INFO  tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:36 Cleaning up the config area
-------------------------------Captured log call--------------------------------
INFO  tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:575 Add just "rootdn-open-time"
Passed suites/plugins/rootdn_plugin_test.py::test_rootdn_access_denied_ip_wildcard 0.02
-------------------------------Captured log setup-------------------------------
INFO  tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:36 Cleaning up the config area
-------------------------------Captured log call--------------------------------
INFO  tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:666 Running test_rootdn_access_denied_ip_wildcard...
Passed suites/plugins/rootdn_plugin_test.py::test_rootdn_access_allowed_ip_wildcard 0.53
-------------------------------Captured log setup-------------------------------
INFO  tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:36 Cleaning up the config area
-------------------------------Captured log call--------------------------------
INFO  tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:712 Running test_rootdn_access_allowed_ip...
Passed suites/psearch/psearch_test.py::test_psearch 2.02
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  lib389:psearch_test.py:30 dc=example,dc=com has changed! INFO  lib389:psearch_test.py:30 ou=groups,dc=example,dc=com has changed! INFO  lib389:psearch_test.py:30 ou=people,dc=example,dc=com has changed! INFO  lib389:psearch_test.py:30 ou=permissions,dc=example,dc=com has changed! INFO  lib389:psearch_test.py:30 ou=services,dc=example,dc=com has changed! INFO  lib389:psearch_test.py:30 uid=demo_user,ou=people,dc=example,dc=com has changed! INFO  lib389:psearch_test.py:30 cn=demo_group,ou=groups,dc=example,dc=com has changed! INFO  lib389:psearch_test.py:30 cn=group_admin,ou=permissions,dc=example,dc=com has changed! INFO  lib389:psearch_test.py:30 cn=group_modify,ou=permissions,dc=example,dc=com has changed! INFO  lib389:psearch_test.py:30 cn=user_admin,ou=permissions,dc=example,dc=com has changed! INFO  lib389:psearch_test.py:30 cn=user_modify,ou=permissions,dc=example,dc=com has changed! INFO  lib389:psearch_test.py:30 cn=user_passwd_reset,ou=permissions,dc=example,dc=com has changed! INFO  lib389:psearch_test.py:30 cn=user_private_read,ou=permissions,dc=example,dc=com has changed! INFO  lib389:psearch_test.py:34 No more results INFO  lib389:psearch_test.py:30 cn=group1,ou=groups,dc=example,dc=com has changed! INFO  lib389:psearch_test.py:34 No more results
Passed suites/pwp_storage/storage_test.py::test_check_password_scheme[CRYPT] 0.30
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/pwp_storage/storage_test.py::test_check_password_scheme[SHA] 0.03
No log output captured.
Passed suites/pwp_storage/storage_test.py::test_check_password_scheme[SSHA] 0.03
No log output captured.
Passed suites/pwp_storage/storage_test.py::test_check_password_scheme[SHA256] 0.04
No log output captured.
Passed suites/pwp_storage/storage_test.py::test_check_password_scheme[SSHA256] 0.04
No log output captured.
Passed suites/pwp_storage/storage_test.py::test_check_password_scheme[SHA384] 0.28
No log output captured.
Passed suites/pwp_storage/storage_test.py::test_check_password_scheme[SSHA384] 0.03
No log output captured.
Passed suites/pwp_storage/storage_test.py::test_check_password_scheme[SHA512] 0.04
No log output captured.
Passed suites/pwp_storage/storage_test.py::test_check_password_scheme[SSHA512] 0.04
No log output captured.
Passed suites/pwp_storage/storage_test.py::test_check_password_scheme[MD5] 0.04
No log output captured.
Passed suites/pwp_storage/storage_test.py::test_check_password_scheme[PBKDF2_SHA256] 0.07
No log output captured.
Passed suites/pwp_storage/storage_test.py::test_clear_scheme 0.03
No log output captured.
Passed suites/pwp_storage/storage_test.py::test_check_two_scheme 4.91
No log output captured.
Passed suites/pwp_storage/storage_test.py::test_check_pbkdf2_sha256 4.60
No log output captured.
Passed suites/pwp_storage/storage_test.py::test_check_ssha512 5.80
No log output captured.
Passed suites/referint_plugin/rename_test.py::test_rename_large_subtree 158.67
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c0516bfb-0a00-4f56-81a7-f3a2570f95b3 / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working 
(expect 99e9e353-ccf8-4923-9106-1b975d64849d / got description=c0516bfb-0a00-4f56-81a7-f3a2570f95b3) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists
-------------------------------Captured log call--------------------------------
INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0bd3d97c-ec4f-45b1-8196-36a3d2f816e6 / got description=99e9e353-ccf8-4923-9106-1b975d64849d) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0bd3d97c-ec4f-45b1-8196-36a3d2f816e6 / got description=99e9e353-ccf8-4923-9106-1b975d64849d) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0bd3d97c-ec4f-45b1-8196-36a3d2f816e6 / got description=99e9e353-ccf8-4923-9106-1b975d64849d) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0bd3d97c-ec4f-45b1-8196-36a3d2f816e6 / got description=99e9e353-ccf8-4923-9106-1b975d64849d) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0bd3d97c-ec4f-45b1-8196-36a3d2f816e6 / got description=99e9e353-ccf8-4923-9106-1b975d64849d) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0bd3d97c-ec4f-45b1-8196-36a3d2f816e6 / got description=99e9e353-ccf8-4923-9106-1b975d64849d) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to 
ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0bd3d97c-ec4f-45b1-8196-36a3d2f816e6 / got description=99e9e353-ccf8-4923-9106-1b975d64849d) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0bd3d97c-ec4f-45b1-8196-36a3d2f816e6 / got description=99e9e353-ccf8-4923-9106-1b975d64849d) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0bd3d97c-ec4f-45b1-8196-36a3d2f816e6 / got description=99e9e353-ccf8-4923-9106-1b975d64849d) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0bd3d97c-ec4f-45b1-8196-36a3d2f816e6 / got description=99e9e353-ccf8-4923-9106-1b975d64849d) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0bd3d97c-ec4f-45b1-8196-36a3d2f816e6 / got description=99e9e353-ccf8-4923-9106-1b975d64849d) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0bd3d97c-ec4f-45b1-8196-36a3d2f816e6 / got description=99e9e353-ccf8-4923-9106-1b975d64849d) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0bd3d97c-ec4f-45b1-8196-36a3d2f816e6 / got 
description=99e9e353-ccf8-4923-9106-1b975d64849d) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0bd3d97c-ec4f-45b1-8196-36a3d2f816e6 / got description=99e9e353-ccf8-4923-9106-1b975d64849d) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0bd3d97c-ec4f-45b1-8196-36a3d2f816e6 / got description=99e9e353-ccf8-4923-9106-1b975d64849d) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c1d2ab25-8491-4766-87d1-156380c5da60 / got description=0bd3d97c-ec4f-45b1-8196-36a3d2f816e6) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c1d2ab25-8491-4766-87d1-156380c5da60 / got description=0bd3d97c-ec4f-45b1-8196-36a3d2f816e6) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c1d2ab25-8491-4766-87d1-156380c5da60 / got description=0bd3d97c-ec4f-45b1-8196-36a3d2f816e6) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 
c1d2ab25-8491-4766-87d1-156380c5da60 / got description=0bd3d97c-ec4f-45b1-8196-36a3d2f816e6) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c1d2ab25-8491-4766-87d1-156380c5da60 / got description=0bd3d97c-ec4f-45b1-8196-36a3d2f816e6) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c1d2ab25-8491-4766-87d1-156380c5da60 / got description=0bd3d97c-ec4f-45b1-8196-36a3d2f816e6) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c1d2ab25-8491-4766-87d1-156380c5da60 / got description=0bd3d97c-ec4f-45b1-8196-36a3d2f816e6) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c1d2ab25-8491-4766-87d1-156380c5da60 / got description=0bd3d97c-ec4f-45b1-8196-36a3d2f816e6) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c1d2ab25-8491-4766-87d1-156380c5da60 / got description=0bd3d97c-ec4f-45b1-8196-36a3d2f816e6) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c1d2ab25-8491-4766-87d1-156380c5da60 / got description=0bd3d97c-ec4f-45b1-8196-36a3d2f816e6) INFO  lib389.replica:replica.py:2498 Retry: Replication from 
ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c1d2ab25-8491-4766-87d1-156380c5da60 / got description=0bd3d97c-ec4f-45b1-8196-36a3d2f816e6) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c1d2ab25-8491-4766-87d1-156380c5da60 / got description=0bd3d97c-ec4f-45b1-8196-36a3d2f816e6) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c1d2ab25-8491-4766-87d1-156380c5da60 / got description=0bd3d97c-ec4f-45b1-8196-36a3d2f816e6) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working
Passed suites/replication/acceptance_test.py::test_add_entry 10.02
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master3 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39003, 'ldap-secureport': 63703, 'server-id': 'master3', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master4 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39004, 'ldap-secureport': 63704, 'server-id': 'master4', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... 
INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 4255c64d-f32c-4996-859d-08d7a85e78ab / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect bb329ae6-2a7b-445b-b1f2-e6e954584e4a / got description=4255c64d-f32c-4996-859d-08d7a85e78ab) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:156 Joining master master3 to master1 ... 
INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 7c15b0dd-fd07-437c-a618-8557cb544115 / got description=bb329ae6-2a7b-445b-b1f2-e6e954584e4a) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 8d128a32-1b27-435c-acdc-ea44c1373be1 / got description=7c15b0dd-fd07-437c-a618-8557cb544115) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 INFO  lib389.topologies:topologies.py:156 Joining master master4 to master1 ... 
INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect da0a7c96-6eac-49e2-84d2-d69d96caeb97 / got description=8d128a32-1b27-435c-acdc-ea44c1373be1) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect a9f0d8ac-8b39-4b4e-a87a-0c6b40ce903e / got description=da0a7c96-6eac-49e2-84d2-d69d96caeb97) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... 
INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master3 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master4 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master3 ... INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is was created INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master4 ... INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is was created INFO  lib389.topologies:topologies.py:164 Ensuring master master3 to master1 ... 
INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master3 to master2 ... INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.topologies:topologies.py:164 Ensuring master master3 to master4 ... INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is was created INFO  lib389.topologies:topologies.py:164 Ensuring master master4 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master4 to master2 ... INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.topologies:topologies.py:164 Ensuring master master4 to master3 ... INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is was created INFO  tests.suites.replication.acceptance_test:acceptance_test.py:41 Adding entry uid=mmrepl_test,dc=example,dc=com
Passed suites/replication/acceptance_test.py::test_modify_entry 3.33
-------------------------------Captured log setup-------------------------------
INFO  tests.suites.replication.acceptance_test:acceptance_test.py:41 Adding entry uid=mmrepl_test,dc=example,dc=com INFO  tests.suites.replication.acceptance_test:acceptance_test.py:45 Deleting entry uid=mmrepl_test,dc=example,dc=com
-------------------------------Captured log call--------------------------------
INFO  tests.suites.replication.acceptance_test:acceptance_test.py:128 Modifying entry uid=mmrepl_test,dc=example,dc=com - add operation INFO  tests.suites.replication.acceptance_test:acceptance_test.py:138 Modifying entry uid=mmrepl_test,dc=example,dc=com - replace operation INFO  tests.suites.replication.acceptance_test:acceptance_test.py:146 Modifying entry uid=mmrepl_test,dc=example,dc=com - delete operation
Passed suites/replication/acceptance_test.py::test_delete_entry 10.03
-------------------------------Captured log setup-------------------------------
INFO  tests.suites.replication.acceptance_test:acceptance_test.py:41 Adding entry uid=mmrepl_test,dc=example,dc=com INFO  tests.suites.replication.acceptance_test:acceptance_test.py:45 Deleting entry uid=mmrepl_test,dc=example,dc=com
-------------------------------Captured log call--------------------------------
INFO  tests.suites.replication.acceptance_test:acceptance_test.py:168 Deleting entry uid=mmrepl_test,dc=example,dc=com during the test
Passed suites/replication/acceptance_test.py::test_modrdn_entry[0] 20.07
-------------------------------Captured log setup-------------------------------
INFO  tests.suites.replication.acceptance_test:acceptance_test.py:41 Adding entry uid=mmrepl_test,dc=example,dc=com
-------------------------------Captured log call--------------------------------
INFO  tests.suites.replication.acceptance_test:acceptance_test.py:192 Modify entry RDN uid=mmrepl_test,dc=example,dc=com INFO  tests.suites.replication.acceptance_test:acceptance_test.py:211 Remove entry with new RDN uid=newrdn,dc=example,dc=com
Passed suites/replication/acceptance_test.py::test_modrdn_entry[1] 20.09
-------------------------------Captured log setup-------------------------------
INFO  tests.suites.replication.acceptance_test:acceptance_test.py:41 Adding entry uid=mmrepl_test,dc=example,dc=com
-------------------------------Captured log call--------------------------------
INFO  tests.suites.replication.acceptance_test:acceptance_test.py:192 Modify entry RDN uid=mmrepl_test,dc=example,dc=com INFO  tests.suites.replication.acceptance_test:acceptance_test.py:211 Remove entry with new RDN uid=newrdn,dc=example,dc=com
Passed suites/replication/acceptance_test.py::test_modrdn_after_pause 14.54
-------------------------------Captured log call--------------------------------
INFO  tests.suites.replication.acceptance_test:acceptance_test.py:237 Adding entry uid=mmrepl_test,dc=example,dc=com INFO  tests.suites.replication.acceptance_test:acceptance_test.py:252 Pause all replicas INFO  tests.suites.replication.acceptance_test:acceptance_test.py:255 Modify entry RDN uid=mmrepl_test,dc=example,dc=com INFO  tests.suites.replication.acceptance_test:acceptance_test.py:263 Resume all replicas INFO  tests.suites.replication.acceptance_test:acceptance_test.py:266 Wait for replication to happen INFO  tests.suites.replication.acceptance_test:acceptance_test.py:273 Remove entry with new RDN uid=newrdn,dc=example,dc=com
Passed suites/replication/acceptance_test.py::test_modify_stripattrs 0.02
-------------------------------Captured log call--------------------------------
INFO  tests.suites.replication.acceptance_test:acceptance_test.py:295 Modify nsds5replicastripattrs with b'modifiersname modifytimestamp' INFO  tests.suites.replication.acceptance_test:acceptance_test.py:298 Check nsds5replicastripattrs for b'modifiersname modifytimestamp'
Passed suites/replication/acceptance_test.py::test_new_suffix 11.02
-------------------------------Captured log setup-------------------------------
INFO  tests.suites.replication.acceptance_test:acceptance_test.py:62 Adding suffix:o=test_repl and backend: repl_base to master1 INFO  lib389:backend.py:80 List backend with suffix=o=test_repl INFO  lib389:backend.py:290 Creating a local backend INFO  lib389:backend.py:76 List backend cn=repl_base,cn=ldbm database,cn=plugins,cn=config INFO  lib389:__init__.py:1713 Found entry dn: cn=repl_base,cn=ldbm database,cn=plugins,cn=config cn: repl_base nsslapd-cachememsize: 512000 nsslapd-cachesize: -1 nsslapd-directory: /var/lib/dirsrv/slapd-master1/db/repl_base nsslapd-dncachememsize: 16777216 nsslapd-readonly: off nsslapd-require-index: off nsslapd-require-internalop-index: off nsslapd-suffix: o=test_repl objectClass: top objectClass: extensibleObject objectClass: nsBackendInstance INFO  lib389:mappingTree.py:154 Entry dn: cn="o=test_repl",cn=mapping tree,cn=config cn: o=test_repl nsslapd-backend: repl_base nsslapd-state: backend objectclass: top objectclass: extensibleObject objectclass: nsMappingTree INFO  lib389:__init__.py:1713 Found entry dn: cn=o\3Dtest_repl,cn=mapping tree,cn=config cn: o=test_repl nsslapd-backend: repl_base nsslapd-state: backend objectClass: top objectClass: extensibleObject objectClass: nsMappingTree INFO  tests.suites.replication.acceptance_test:acceptance_test.py:62 Adding suffix:o=test_repl and backend: repl_base to master2 INFO  lib389:backend.py:80 List backend with suffix=o=test_repl INFO  lib389:backend.py:290 Creating a local backend INFO  lib389:backend.py:76 List backend cn=repl_base,cn=ldbm database,cn=plugins,cn=config INFO  lib389:__init__.py:1713 Found entry dn: cn=repl_base,cn=ldbm database,cn=plugins,cn=config cn: repl_base nsslapd-cachememsize: 512000 nsslapd-cachesize: -1 nsslapd-directory: /var/lib/dirsrv/slapd-master2/db/repl_base nsslapd-dncachememsize: 16777216 nsslapd-readonly: off nsslapd-require-index: off nsslapd-require-internalop-index: off nsslapd-suffix: o=test_repl objectClass: top objectClass: extensibleObject 
objectClass: nsBackendInstance INFO  lib389:mappingTree.py:154 Entry dn: cn="o=test_repl",cn=mapping tree,cn=config cn: o=test_repl nsslapd-backend: repl_base nsslapd-state: backend objectclass: top objectclass: extensibleObject objectclass: nsMappingTree INFO  lib389:__init__.py:1713 Found entry dn: cn=o\3Dtest_repl,cn=mapping tree,cn=config cn: o=test_repl nsslapd-backend: repl_base nsslapd-state: backend objectClass: top objectClass: extensibleObject objectClass: nsMappingTree INFO  tests.suites.replication.acceptance_test:acceptance_test.py:62 Adding suffix:o=test_repl and backend: repl_base to master3 INFO  lib389:backend.py:80 List backend with suffix=o=test_repl INFO  lib389:backend.py:290 Creating a local backend INFO  lib389:backend.py:76 List backend cn=repl_base,cn=ldbm database,cn=plugins,cn=config INFO  lib389:__init__.py:1713 Found entry dn: cn=repl_base,cn=ldbm database,cn=plugins,cn=config cn: repl_base nsslapd-cachememsize: 512000 nsslapd-cachesize: -1 nsslapd-directory: /var/lib/dirsrv/slapd-master3/db/repl_base nsslapd-dncachememsize: 16777216 nsslapd-readonly: off nsslapd-require-index: off nsslapd-require-internalop-index: off nsslapd-suffix: o=test_repl objectClass: top objectClass: extensibleObject objectClass: nsBackendInstance INFO  lib389:mappingTree.py:154 Entry dn: cn="o=test_repl",cn=mapping tree,cn=config cn: o=test_repl nsslapd-backend: repl_base nsslapd-state: backend objectclass: top objectclass: extensibleObject objectclass: nsMappingTree INFO  lib389:__init__.py:1713 Found entry dn: cn=o\3Dtest_repl,cn=mapping tree,cn=config cn: o=test_repl nsslapd-backend: repl_base nsslapd-state: backend objectClass: top objectClass: extensibleObject objectClass: nsMappingTree INFO  tests.suites.replication.acceptance_test:acceptance_test.py:62 Adding suffix:o=test_repl and backend: repl_base to master4 INFO  lib389:backend.py:80 List backend with suffix=o=test_repl INFO  lib389:backend.py:290 Creating a local backend INFO  lib389:backend.py:76 
List backend cn=repl_base,cn=ldbm database,cn=plugins,cn=config INFO  lib389:__init__.py:1713 Found entry dn: cn=repl_base,cn=ldbm database,cn=plugins,cn=config cn: repl_base nsslapd-cachememsize: 512000 nsslapd-cachesize: -1 nsslapd-directory: /var/lib/dirsrv/slapd-master4/db/repl_base nsslapd-dncachememsize: 16777216 nsslapd-readonly: off nsslapd-require-index: off nsslapd-require-internalop-index: off nsslapd-suffix: o=test_repl objectClass: top objectClass: extensibleObject objectClass: nsBackendInstance INFO  lib389:mappingTree.py:154 Entry dn: cn="o=test_repl",cn=mapping tree,cn=config cn: o=test_repl nsslapd-backend: repl_base nsslapd-state: backend objectclass: top objectclass: extensibleObject objectclass: nsMappingTree INFO  lib389:__init__.py:1713 Found entry dn: cn=o\3Dtest_repl,cn=mapping tree,cn=config cn: o=test_repl nsslapd-backend: repl_base nsslapd-state: backend objectClass: top objectClass: extensibleObject objectClass: nsMappingTree
-------------------------------Captured log call--------------------------------
INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 56d4c2b1-126e-40be-947e-9b2794b23f26 / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 4529e1ef-a219-4c15-afcd-317be62f4dc8 / got description=56d4c2b1-126e-40be-947e-9b2794b23f26) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 647ed267-2f52-4135-a7e2-914adff01bf8 / got 
description=4529e1ef-a219-4c15-afcd-317be62f4dc8) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 75845a1f-6cac-4f5f-bfc5-a24d2c481fdc / got description=647ed267-2f52-4135-a7e2-914adff01bf8) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working
Passed suites/replication/acceptance_test.py::test_many_attrs 20.12
-------------------------------Captured log setup-------------------------------
INFO  tests.suites.replication.acceptance_test:acceptance_test.py:41 Adding entry uid=mmrepl_test,dc=example,dc=com
-------------------------------Captured log call--------------------------------
INFO  tests.suites.replication.acceptance_test:acceptance_test.py:353 Modifying entry uid=mmrepl_test,dc=example,dc=com - 10 add operations INFO  tests.suites.replication.acceptance_test:acceptance_test.py:357 Check that everything was properly replicated after an add operation INFO  tests.suites.replication.acceptance_test:acceptance_test.py:362 Modifying entry uid=mmrepl_test,dc=example,dc=com - 4 delete operations for [b'test0', b'test4', b'test7', b'test9'] INFO  tests.suites.replication.acceptance_test:acceptance_test.py:366 Check that everything was properly replicated after a delete operation
Passed suites/replication/acceptance_test.py::test_double_delete 10.03
-------------------------------Captured log setup-------------------------------
INFO  tests.suites.replication.acceptance_test:acceptance_test.py:41 Adding entry uid=mmrepl_test,dc=example,dc=com INFO  tests.suites.replication.acceptance_test:acceptance_test.py:45 Deleting entry uid=mmrepl_test,dc=example,dc=com
-------------------------------Captured log call--------------------------------
INFO  tests.suites.replication.acceptance_test:acceptance_test.py:386 Deleting entry uid=mmrepl_test,dc=example,dc=com from master1 INFO  tests.suites.replication.acceptance_test:acceptance_test.py:389 Deleting entry uid=mmrepl_test,dc=example,dc=com from master2 INFO  tests.suites.replication.acceptance_test:acceptance_test.py:393 Entry uid=mmrepl_test,dc=example,dc=com wasn't found master2. It is expected. INFO  tests.suites.replication.acceptance_test:acceptance_test.py:395 Make searches to check if server is alive
Passed suites/replication/acceptance_test.py::test_password_repl_error 20.20
-------------------------------Captured log setup-------------------------------
INFO  tests.suites.replication.acceptance_test:acceptance_test.py:41 Adding entry uid=mmrepl_test,dc=example,dc=com
-------------------------------Captured log call--------------------------------
INFO  tests.suites.replication.acceptance_test:acceptance_test.py:416 Clean the error log INFO  tests.suites.replication.acceptance_test:acceptance_test.py:419 Set replication loglevel INFO  tests.suites.replication.acceptance_test:acceptance_test.py:422 Modifying entry uid=mmrepl_test,dc=example,dc=com - change userpassword on master 2 INFO  tests.suites.replication.acceptance_test:acceptance_test.py:430 Restart the servers to flush the logs INFO  tests.suites.replication.acceptance_test:acceptance_test.py:439 Check the error log for the error with uid=mmrepl_test,dc=example,dc=com
Passed suites/replication/acceptance_test.py::test_invalid_agmt 0.08
-------------------------------Captured log call--------------------------------
CRITICAL lib389:agreement.py:1026 Failed to add replication agreement: {'msgtype': 105, 'msgid': 5, 'result': 53, 'desc': 'Server is unwilling to perform', 'ctrls': []} INFO  lib389:acceptance_test.py:464 Invalid repl agreement correctly rejected
Passed suites/replication/acceptance_test.py::test_warining_for_invalid_replica 0.02
-------------------------------Captured log call--------------------------------
INFO  tests.suites.replication.acceptance_test:acceptance_test.py:494 Set nsds5ReplicaBackoffMin to 20 INFO  tests.suites.replication.acceptance_test:acceptance_test.py:497 Set nsds5ReplicaBackoffMax to 10 INFO  tests.suites.replication.acceptance_test:acceptance_test.py:499 Resetting configuration: nsds5ReplicaBackoffMin INFO  tests.suites.replication.acceptance_test:acceptance_test.py:501 Check the error log for the error
Passed suites/replication/acceptance_test.py::test_csngen_task 11.02
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 1860ce6e-8fa3-4982-99c6-e7574253a901 / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working 
(expect a5495449-4959-4aa8-9215-c10ecc4100d1 / got description=1860ce6e-8fa3-4982-99c6-e7574253a901) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists
-------------------------------Captured log call--------------------------------
INFO  tests.suites.replication.acceptance_test:acceptance_test.py:523 Check the error log contains strings showing csn generator is tested
Passed suites/replication/acceptance_test.py::test_csnpurge_large_valueset 6.43
-------------------------------Captured log call--------------------------------
INFO  tests.suites.replication.acceptance_test:acceptance_test.py:574 nsds5ReplicaPurgeDelay to 5
Passed suites/replication/acceptance_test.py::test_urp_trigger_substring_search 611.16
-------------------------------Captured log call--------------------------------
INFO  tests.suites.replication.acceptance_test:acceptance_test.py:604 Set nsslapd-plugin-logging to on INFO  tests.suites.replication.acceptance_test:acceptance_test.py:634 Entry not yet replicated on M2, wait a bit INFO  tests.suites.replication.acceptance_test:acceptance_test.py:634 Entry not yet replicated on M2, wait a bit INFO  tests.suites.replication.acceptance_test:acceptance_test.py:638 Check that on M2, URP as not triggered such internal search INFO  tests.suites.replication.acceptance_test:acceptance_test.py:641 found line: []
Passed suites/replication/cascading_test.py::test_basic_with_hub 16.46
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for hub1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39101, 'ldap-secureport': 63801, 'server-id': 'hub1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for consumer1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39201, 'ldap-secureport': 63901, 'server-id': 'consumer1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:524 Creating replication topology. INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39101 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39101 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39101 is NOT working (expect 7f1037f9-be06-44bc-939c-bcf0f2f11095 / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39101 is working INFO  lib389.replica:replica.py:2211 SUCCESS: joined consumer from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to 
ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39101 INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39101 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is was created INFO  lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39101 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 300e6913-f4b8-4af5-8210-89d70d349d58 / got description=7f1037f9-be06-44bc-939c-bcf0f2f11095) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is working
-------------------------------Captured log call--------------------------------
INFO  tests.suites.replication.cascading_test:cascading_test.py:45 update cn=101,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config to add nsDS5ReplicatedAttributeListTotal INFO  tests.suites.replication.cascading_test:cascading_test.py:45 update cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config to add nsDS5ReplicatedAttributeListTotal INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 210a4cf0-ed4d-4869-a9b0-040daca30864 / got description=300e6913-f4b8-4af5-8210-89d70d349d58) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect a7f91639-ea88-409c-8bd9-dd7f26be1b16 / got description=210a4cf0-ed4d-4869-a9b0-040daca30864) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect e0565612-51d3-4cff-a726-acce20974bf9 / got description=a7f91639-ea88-409c-8bd9-dd7f26be1b16) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is working
Passed suites/replication/changelog_encryption_test.py::test_cl_encryption_setup_process 29.13
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for consumer1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39201, 'ldap-secureport': 63901, 'server-id': 'consumer1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:169 Joining consumer consumer1 from master1 ... INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 7453b724-7144-447b-9be9-04dddece16ce / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is working INFO  lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 INFO  lib389.topologies:topologies.py:174 Ensuring consumer consumer1 from master1 ... 
INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 already exists
-------------------------------Captured log call--------------------------------
INFO  tests.suites.replication.changelog_encryption_test:changelog_encryption_test.py:43 Enable TLS ... INFO  tests.suites.replication.changelog_encryption_test:changelog_encryption_test.py:48 Export changelog ... INFO  tests.suites.replication.changelog_encryption_test:changelog_encryption_test.py:55 Enable changelog encryption ... INFO  tests.suites.replication.changelog_encryption_test:changelog_encryption_test.py:68 Import changelog ... INFO  tests.suites.replication.changelog_encryption_test:changelog_encryption_test.py:73 Test replication is still working ...
Passed suites/replication/changelog_test.py::test_dsconf_dump_changelog_files_removed 5.56
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect be18d03a-db7c-4419-9cd0-5776b437a976 / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working 
(expect ba02229d-9382-45fe-a972-64cdaefec135 / got description=be18d03a-db7c-4419-9cd0-5776b437a976) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists
-------------------------------Captured log call--------------------------------
INFO  tests.suites.replication.changelog_test:changelog_test.py:222 Remove .ldif files, if present in: /var/lib/dirsrv/slapd-master1/ldif INFO  tests.suites.replication.changelog_test:changelog_test.py:222 Remove .done files, if present in: /var/lib/dirsrv/slapd-master1/ldif INFO  tests.suites.replication.changelog_test:changelog_test.py:52 Adding user replusr INFO  tests.suites.replication.changelog_test:changelog_test.py:65 Modify RDN of user uid=replusr,ou=People,dc=example,dc=com INFO  tests.suites.replication.changelog_test:changelog_test.py:72 Deleting user: uid=cl5usr,ou=people,dc=example,dc=com INFO  tests.suites.replication.changelog_test:changelog_test.py:386 Use dsconf export-changelog with invalid parameters INFO  tests.suites.replication.changelog_test:changelog_test.py:388 Command used : ['/usr/sbin/dsconf', 'ldap://LOCALHOST:39001', '-D', 'cn=Directory Manager', '-w', 'badpasswd', 'replication', 'export-changelog'] INFO  tests.suites.replication.changelog_test:changelog_test.py:391 output message : b'No action provided, here is some --help.\nusage: dsconf [-h] [-v] [-D BINDDN] [-w BINDPW] [-W] [-y PWDFILE] [-b BASEDN]\n [-Z] [-j]\n instance\n {backend,backup,chaining,config,directory_manager,monitor,plugin,pwpolicy,localpwp,replication,repl-agmt,repl-winsync-agmt,repl-tasks,sasl,security,schema,repl-conflict}\n ...\n\npositional arguments:\n instance The instance name OR the LDAP url to connect to, IE\n localhost, ldap://mai.example.com:389\n {backend,backup,chaining,config,directory_manager,monitor,plugin,pwpolicy,localpwp,replication,repl-agmt,repl-winsync-agmt,repl-tasks,sasl,security,schema,repl-conflict}\n resources to act upon\n backend Manage database suffixes and backends\n backup Manage online backups\n chaining Manage database chaining/database links\n config Manage server configuration\n directory_manager Manage the directory manager account\n monitor Monitor the state of the instance\n plugin Manage plugins available on the server\n 
pwpolicy Get and set the global password policy settings\n localpwp Manage local (user/subtree) password policies\n replication Configure replication for a suffix\n repl-agmt Manage replication agreements\n repl-winsync-agmt Manage Winsync Agreements\n repl-tasks Manage replication tasks\n sasl Query and manipulate SASL mappings\n security Query and manipulate security options\n schema Query and manipulate schema\n repl-conflict Manage replication conflicts\n\noptional arguments:\n -h, --help show this help message and exit\n -v, --verbose Display verbose operation tracing during command\n execution\n -D BINDDN, --binddn BINDDN\n The account to bind as for executing operations\n -w BINDPW, --bindpw BINDPW\n Password for binddn\n -W, --prompt Prompt for password for the bind DN\n -y PWDFILE, --pwdfile PWDFILE\n Specifies a file containing the password for the\n binddn\n -b BASEDN, --basedn BASEDN\n Basedn (root naming context) of the instance to manage\n -Z, --starttls Connect with StartTLS\n -j, --json Return result in JSON object\n' INFO  tests.suites.replication.changelog_test:changelog_test.py:396 Use dsconf replication changelog without -l option: no generated ldif files should be present in /var/lib/dirsrv/slapd-master1/ldif INFO  tests.suites.replication.changelog_test:changelog_test.py:399 Command used : ['/usr/sbin/dsconf', 'ldap://LOCALHOST:39001', '-D', 'cn=Directory Manager', '-w', 'password', 'replication', 'export-changelog', 'default', '-r', 'dc=example,dc=com'] INFO  tests.suites.replication.changelog_test:changelog_test.py:404 Wait for all dsconf export-changelog files to be generated INFO  tests.suites.replication.changelog_test:changelog_test.py:407 Check if dsconf export-changelog generated .ldif.done files are present - should not INFO  tests.suites.replication.changelog_test:changelog_test.py:413 All dsconf export-changelog generated .ldif files have been successfully removed from /var/lib/dirsrv/slapd-master1/ldif INFO  
tests.suites.replication.changelog_test:changelog_test.py:416 Use dsconf replication changelog with -l option: generated ldif files should be kept in /var/lib/dirsrv/slapd-master1/ldif INFO  tests.suites.replication.changelog_test:changelog_test.py:419 Command used : ['/usr/sbin/dsconf', 'ldap://LOCALHOST:39001', '-D', 'cn=Directory Manager', '-w', 'password', 'replication', 'export-changelog', 'to-ldif', '-o', '/var/lib/dirsrv/slapd-master1/ldif/test.ldif', '-r', 'dc=example,dc=com', '-l'] INFO  tests.suites.replication.changelog_test:changelog_test.py:424 Wait for all dsconf export-changelog files to be generated INFO  tests.suites.replication.changelog_test:changelog_test.py:427 Check if dsconf export-changelog generated .ldif.done files are present - should be INFO  tests.suites.replication.changelog_test:changelog_test.py:431 Success : ldif file /var/lib/dirsrv/slapd-master1/ldif/6bd46a04-199011eb-b39de0cf-5e9a310e_cl.ldif.done is present
Passed suites/replication/changelog_test.py::test_verify_changelog 0.14
-------------------------------Captured log call--------------------------------
INFO  tests.suites.replication.changelog_test:changelog_test.py:454 LDAP operations add, modify, modrdn and delete INFO  tests.suites.replication.changelog_test:changelog_test.py:52 Adding user replusr INFO  tests.suites.replication.changelog_test:changelog_test.py:65 Modify RDN of user uid=replusr,ou=People,dc=example,dc=com INFO  tests.suites.replication.changelog_test:changelog_test.py:72 Deleting user: uid=cl5usr,ou=people,dc=example,dc=com INFO  tests.suites.replication.changelog_test:changelog_test.py:79 Dump changelog using nss5task and check if ldap operations are logged INFO  tests.suites.replication.changelog_test:changelog_test.py:88 Remove ldif files, if present in: /var/lib/dirsrv/slapd-master1/ldif INFO  tests.suites.replication.changelog_test:changelog_test.py:99 No existing changelog ldif files present INFO  tests.suites.replication.changelog_test:changelog_test.py:101 Running nsds5task to dump changelog database to a file INFO  tests.suites.replication.changelog_test:changelog_test.py:104 Check if changelog ldif file exist in: /var/lib/dirsrv/slapd-master1/ldif INFO  tests.suites.replication.changelog_test:changelog_test.py:108 Changelog ldif file exist: /var/lib/dirsrv/slapd-master1/ldif/6bd46a04-199011eb-b39de0cf-5e9a310e_cl.ldif INFO  tests.suites.replication.changelog_test:changelog_test.py:118 Checking changelog ldif file for ldap operations INFO  tests.suites.replication.changelog_test:changelog_test.py:123 Checking if all required changetype operations are present INFO  tests.suites.replication.changelog_test:changelog_test.py:129 Valid ldap operations: {'modify', 'add', 'modrdn', 'delete'} INFO  tests.suites.replication.changelog_test:changelog_test.py:130 Ldap operations found: {'modify', 'add', 'modrdn', 'delete'}
Passed suites/replication/changelog_test.py::test_verify_changelog_online_backup 5.25
-------------------------------Captured log call--------------------------------
INFO  tests.suites.replication.changelog_test:changelog_test.py:481 Run db2bak script to take database backup INFO  lib389:tasks.py:619 Backup task backup_10282020_224308 completed successfully INFO  tests.suites.replication.changelog_test:changelog_test.py:493 Database backup is created successfully INFO  tests.suites.replication.changelog_test:changelog_test.py:498 Run bak2db to restore directory server INFO  lib389:tasks.py:673 Restore task restore_10282020_224310 completed successfully INFO  tests.suites.replication.changelog_test:changelog_test.py:505 LDAP operations add, modify, modrdn and delete INFO  tests.suites.replication.changelog_test:changelog_test.py:52 Adding user replusr INFO  tests.suites.replication.changelog_test:changelog_test.py:65 Modify RDN of user uid=replusr,ou=People,dc=example,dc=com INFO  tests.suites.replication.changelog_test:changelog_test.py:72 Deleting user: uid=cl5usr,ou=people,dc=example,dc=com INFO  tests.suites.replication.changelog_test:changelog_test.py:79 Dump changelog using nss5task and check if ldap operations are logged INFO  tests.suites.replication.changelog_test:changelog_test.py:88 Remove ldif files, if present in: /var/lib/dirsrv/slapd-master1/ldif INFO  tests.suites.replication.changelog_test:changelog_test.py:97 Existing changelog ldif file: /var/lib/dirsrv/slapd-master1/ldif/6bd46a04-199011eb-b39de0cf-5e9a310e_cl.ldif removed INFO  tests.suites.replication.changelog_test:changelog_test.py:99 No existing changelog ldif files present INFO  tests.suites.replication.changelog_test:changelog_test.py:101 Running nsds5task to dump changelog database to a file INFO  tests.suites.replication.changelog_test:changelog_test.py:104 Check if changelog ldif file exist in: /var/lib/dirsrv/slapd-master1/ldif INFO  tests.suites.replication.changelog_test:changelog_test.py:108 Changelog ldif file exist: /var/lib/dirsrv/slapd-master1/ldif/6bd46a04-199011eb-b39de0cf-5e9a310e_cl.ldif INFO  
tests.suites.replication.changelog_test:changelog_test.py:118 Checking changelog ldif file for ldap operations INFO  tests.suites.replication.changelog_test:changelog_test.py:123 Checking if all required changetype operations are present INFO  tests.suites.replication.changelog_test:changelog_test.py:129 Valid ldap operations: {'modify', 'add', 'modrdn', 'delete'} INFO  tests.suites.replication.changelog_test:changelog_test.py:130 Ldap operations found: {'modify', 'add', 'modrdn', 'delete'}
Passed suites/replication/changelog_test.py::test_verify_changelog_offline_backup 5.78
-------------------------------Captured log call--------------------------------
INFO  tests.suites.replication.changelog_test:changelog_test.py:534 Run db2bak to take database backup INFO  tests.suites.replication.changelog_test:changelog_test.py:541 Run bak2db to restore directory server INFO  tests.suites.replication.changelog_test:changelog_test.py:554 Database backup is created successfully INFO  tests.suites.replication.changelog_test:changelog_test.py:559 LDAP operations add, modify, modrdn and delete INFO  tests.suites.replication.changelog_test:changelog_test.py:52 Adding user replusr INFO  tests.suites.replication.changelog_test:changelog_test.py:65 Modify RDN of user uid=replusr,ou=People,dc=example,dc=com INFO  tests.suites.replication.changelog_test:changelog_test.py:72 Deleting user: uid=cl5usr,ou=people,dc=example,dc=com INFO  tests.suites.replication.changelog_test:changelog_test.py:79 Dump changelog using nss5task and check if ldap operations are logged INFO  tests.suites.replication.changelog_test:changelog_test.py:88 Remove ldif files, if present in: /var/lib/dirsrv/slapd-master1/ldif INFO  tests.suites.replication.changelog_test:changelog_test.py:97 Existing changelog ldif file: /var/lib/dirsrv/slapd-master1/ldif/6bd46a04-199011eb-b39de0cf-5e9a310e_cl.ldif removed INFO  tests.suites.replication.changelog_test:changelog_test.py:99 No existing changelog ldif files present INFO  tests.suites.replication.changelog_test:changelog_test.py:101 Running nsds5task to dump changelog database to a file INFO  tests.suites.replication.changelog_test:changelog_test.py:104 Check if changelog ldif file exist in: /var/lib/dirsrv/slapd-master1/ldif INFO  tests.suites.replication.changelog_test:changelog_test.py:108 Changelog ldif file exist: /var/lib/dirsrv/slapd-master1/ldif/6bd46a04-199011eb-b39de0cf-5e9a310e_cl.ldif INFO  tests.suites.replication.changelog_test:changelog_test.py:118 Checking changelog ldif file for ldap operations INFO  tests.suites.replication.changelog_test:changelog_test.py:123 Checking if all required changetype 
operations are present INFO  tests.suites.replication.changelog_test:changelog_test.py:129 Valid ldap operations: {'modify', 'add', 'modrdn', 'delete'} INFO  tests.suites.replication.changelog_test:changelog_test.py:130 Ldap operations found: {'modify', 'add', 'modrdn', 'delete'}
Passed suites/replication/changelog_test.py::test_changelog_maxage 0.57
-------------------------------Captured log setup-------------------------------
INFO  tests.suites.replication.changelog_test:changelog_test.py:144 Testing Ticket 47669 - Test duration syntax in the changelogs INFO  lib389:changelog_test.py:147 Bind as cn=Directory Manager
-------------------------------Captured log call--------------------------------
INFO  tests.suites.replication.changelog_test:changelog_test.py:581 1. Test nsslapd-changelogmaxage in cn=changelog5,cn=config INFO  lib389:changelog_test.py:584 Bind as cn=Directory Manager INFO  tests.suites.replication.changelog_test:changelog_test.py:174 Test nsslapd-changelogmaxage: 12345 -- valid INFO  tests.suites.replication.changelog_test:changelog_test.py:174 Test nsslapd-changelogmaxage: 10s -- valid INFO  tests.suites.replication.changelog_test:changelog_test.py:174 Test nsslapd-changelogmaxage: 30M -- valid INFO  tests.suites.replication.changelog_test:changelog_test.py:174 Test nsslapd-changelogmaxage: 12h -- valid INFO  tests.suites.replication.changelog_test:changelog_test.py:174 Test nsslapd-changelogmaxage: 2D -- valid INFO  tests.suites.replication.changelog_test:changelog_test.py:174 Test nsslapd-changelogmaxage: 4w -- valid INFO  tests.suites.replication.changelog_test:changelog_test.py:181 Test nsslapd-changelogmaxage: -123 -- invalid ERROR  tests.suites.replication.changelog_test:changelog_test.py:186 Expectedly failed to add nsslapd-changelogmaxage: -123 to cn=changelog,cn=userRoot,cn=ldbm database,cn=plugins,cn=config: error Server is unwilling to perform INFO  tests.suites.replication.changelog_test:changelog_test.py:181 Test nsslapd-changelogmaxage: xyz -- invalid ERROR  tests.suites.replication.changelog_test:changelog_test.py:186 Expectedly failed to add nsslapd-changelogmaxage: xyz to cn=changelog,cn=userRoot,cn=ldbm database,cn=plugins,cn=config: error Server is unwilling to perform
Passed suites/replication/changelog_test.py::test_ticket47669_changelog_triminterval 0.06
-------------------------------Captured log call--------------------------------
INFO  tests.suites.replication.changelog_test:changelog_test.py:614 2. Test nsslapd-changelogtrim-interval in cn=changelog5,cn=config INFO  lib389:changelog_test.py:617 Bind as cn=Directory Manager INFO  tests.suites.replication.changelog_test:changelog_test.py:174 Test nsslapd-changelogtrim-interval: 12345 -- valid INFO  tests.suites.replication.changelog_test:changelog_test.py:174 Test nsslapd-changelogtrim-interval: 10s -- valid INFO  tests.suites.replication.changelog_test:changelog_test.py:174 Test nsslapd-changelogtrim-interval: 30M -- valid INFO  tests.suites.replication.changelog_test:changelog_test.py:174 Test nsslapd-changelogtrim-interval: 12h -- valid INFO  tests.suites.replication.changelog_test:changelog_test.py:174 Test nsslapd-changelogtrim-interval: 2D -- valid INFO  tests.suites.replication.changelog_test:changelog_test.py:174 Test nsslapd-changelogtrim-interval: 4w -- valid INFO  tests.suites.replication.changelog_test:changelog_test.py:181 Test nsslapd-changelogtrim-interval: -123 -- invalid ERROR  tests.suites.replication.changelog_test:changelog_test.py:186 Expectedly failed to add nsslapd-changelogtrim-interval: -123 to cn=changelog,cn=userRoot,cn=ldbm database,cn=plugins,cn=config: error Server is unwilling to perform INFO  tests.suites.replication.changelog_test:changelog_test.py:181 Test nsslapd-changelogtrim-interval: xyz -- invalid ERROR  tests.suites.replication.changelog_test:changelog_test.py:186 Expectedly failed to add nsslapd-changelogtrim-interval: xyz to cn=changelog,cn=userRoot,cn=ldbm database,cn=plugins,cn=config: error Server is unwilling to perform
Passed suites/replication/changelog_test.py::test_retrochangelog_maxage 0.12
-------------------------------Captured log call--------------------------------
INFO  tests.suites.replication.changelog_test:changelog_test.py:683 4. Test nsslapd-changelogmaxage in cn=Retro Changelog Plugin,cn=plugins,cn=config INFO  lib389:changelog_test.py:686 Bind as cn=Directory Manager INFO  tests.suites.replication.changelog_test:changelog_test.py:174 Test nsslapd-changelogmaxage: 12345 -- valid INFO  tests.suites.replication.changelog_test:changelog_test.py:174 Test nsslapd-changelogmaxage: 10s -- valid INFO  tests.suites.replication.changelog_test:changelog_test.py:174 Test nsslapd-changelogmaxage: 30M -- valid INFO  tests.suites.replication.changelog_test:changelog_test.py:174 Test nsslapd-changelogmaxage: 12h -- valid INFO  tests.suites.replication.changelog_test:changelog_test.py:174 Test nsslapd-changelogmaxage: 2D -- valid INFO  tests.suites.replication.changelog_test:changelog_test.py:174 Test nsslapd-changelogmaxage: 4w -- valid INFO  tests.suites.replication.changelog_test:changelog_test.py:181 Test nsslapd-changelogmaxage: -123 -- invalid INFO  tests.suites.replication.changelog_test:changelog_test.py:181 Test nsslapd-changelogmaxage: xyz -- invalid INFO  lib389:changelog_test.py:698 ticket47669 was successfully verified.
Passed suites/replication/changelog_test.py::test_retrochangelog_trimming_crash 22.04
-------------------------------Captured log call--------------------------------
INFO  tests.suites.replication.changelog_test:changelog_test.py:722 1. Test retroCL trimming crash in cn=Retro Changelog Plugin,cn=plugins,cn=config INFO  lib389:changelog_test.py:726 ticket50736 start verification INFO  lib389:changelog_test.py:742 ticket 50736 was successfully verified.
Passed suites/replication/changelog_trimming_test.py::test_max_age 14.61
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology.
-------------------------------Captured log call--------------------------------
INFO  tests.suites.replication.changelog_trimming_test:changelog_trimming_test.py:90 Testing changelog triming interval with max age...
Passed suites/replication/changelog_trimming_test.py::test_max_entries 11.66
-------------------------------Captured log call--------------------------------
INFO  tests.suites.replication.changelog_trimming_test:changelog_trimming_test.py:133 Testing changelog triming interval with max entries...
Passed suites/replication/cleanallruv_max_tasks_test.py::test_max_tasks 41.07
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master3 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39003, 'ldap-secureport': 63703, 'server-id': 'master3', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master4 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39004, 'ldap-secureport': 63704, 'server-id': 'master4', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... 
INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 9afdb3ce-6f5d-4ed8-a951-2fe3b015e629 / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 43766598-de5a-4e35-8c1b-65c1c4a74a3a / got description=9afdb3ce-6f5d-4ed8-a951-2fe3b015e629) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:156 Joining master master3 to master1 ... 
INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect c363a5ff-8fd4-4f78-ae76-680d36710eed / got description=43766598-de5a-4e35-8c1b-65c1c4a74a3a) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 6186f17f-5734-4ef7-b16b-0cad96b59d30 / got description=c363a5ff-8fd4-4f78-ae76-680d36710eed) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 INFO  lib389.topologies:topologies.py:156 Joining master master4 to master1 ... 
INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect ee94f1b9-24d4-4fd1-a278-f59f7933d5c2 / got description=84595f52-0c73-4ac7-b7c6-add3133f5688) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master3 ... 
INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master4 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master3 ... INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is was created INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master4 ... INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is was created INFO  lib389.topologies:topologies.py:164 Ensuring master master3 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master3 to master2 ... 
INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.topologies:topologies.py:164 Ensuring master master3 to master4 ... INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is was created INFO  lib389.topologies:topologies.py:164 Ensuring master master4 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master4 to master2 ... INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.topologies:topologies.py:164 Ensuring master master4 to master3 ... INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is was created
Passed suites/replication/cleanallruv_test.py::test_clean 2.92
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master3 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39003, 'ldap-secureport': 63703, 'server-id': 'master3', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master4 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39004, 'ldap-secureport': 63704, 'server-id': 'master4', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... 
INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 9ceb012d-7e59-46ee-88e4-0989d7af632f / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect a3384ced-0080-4c19-9da6-6edeb9f9ace8 / got description=9ceb012d-7e59-46ee-88e4-0989d7af632f) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:156 Joining master master3 to master1 ... 
INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect b1be2009-f732-4982-ba7b-d3c537bd00cc / got description=a3384ced-0080-4c19-9da6-6edeb9f9ace8) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 3a7b8c3d-7540-46ed-afd4-3214ddccf859 / got description=b1be2009-f732-4982-ba7b-d3c537bd00cc) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 INFO  lib389.topologies:topologies.py:156 Joining master master4 to master1 ... 
INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect cf456f2e-c94e-4286-80d9-00bbfa6162d5 / got description=3a7b8c3d-7540-46ed-afd4-3214ddccf859) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 84fd2742-0f81-44d6-8b7f-d5b92123a22f / got description=cf456f2e-c94e-4286-80d9-00bbfa6162d5) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... 
INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master3 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master4 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master3 ... INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is was created INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master4 ... INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is was created INFO  lib389.topologies:topologies.py:164 Ensuring master master3 to master1 ... 
INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master3 to master2 ... INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.topologies:topologies.py:164 Ensuring master master3 to master4 ... INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is was created INFO  lib389.topologies:topologies.py:164 Ensuring master master4 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master4 to master2 ... INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.topologies:topologies.py:164 Ensuring master master4 to master3 ... 
INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is was created DEBUG  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:153 Wait a bit before the reset - it is required for the slow machines DEBUG  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:155 -------------- BEGIN RESET of m4 ----------------- INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 876cab41-f795-418e-937d-157189fcbaab / got description=84fd2742-0f81-44d6-8b7f-d5b92123a22f) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 236b0e47-b806-412b-8934-8ac8d7f2a8c1 / got description=876cab41-f795-418e-937d-157189fcbaab) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 0383f73a-5c31-4b66-9d40-ff3f81fcddbf / got description=236b0e47-b806-412b-8934-8ac8d7f2a8c1) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is working 
INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect f83626a0-50f5-4a6e-a663-730ae71d4012 / got description=0383f73a-5c31-4b66-9d40-ff3f81fcddbf) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 6a56e29b-3eae-403d-8ada-6f1a5b8d788d / got description=f83626a0-50f5-4a6e-a663-730ae71d4012) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 00a26739-ef71-44e5-87cf-a9c687dc70cf / got description=6a56e29b-3eae-403d-8ada-6f1a5b8d788d) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 6e356729-e385-485b-8216-9d6e74f85935 / got description=00a26739-ef71-44e5-87cf-a9c687dc70cf) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to 
ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 118321fb-6cd8-4d5d-978b-5e3d8663b5a0 / got description=6e356729-e385-485b-8216-9d6e74f85935) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 87b97a3e-0504-4e83-920b-99e5257b00f2 / got description=118321fb-6cd8-4d5d-978b-5e3d8663b5a0) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect ad6f8350-6a54-483f-ba65-99bc69d807fe / got description=87b97a3e-0504-4e83-920b-99e5257b00f2) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 43e629d4-b35f-4a43-88b8-cfe79cd5765b / got description=ad6f8350-6a54-483f-ba65-99bc69d807fe) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from 
ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect fd91dd20-aa2a-42c3-b9db-beeffd8fbf4e / got description=43e629d4-b35f-4a43-88b8-cfe79cd5765b) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is working DEBUG  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:181 -------------- FINISH RESET of m4 -----------------
-------------------------------Captured log call--------------------------------
INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:204 Running test_clean... INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:207 test_clean: disable master 4... INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:71 test_clean: remove all the agreements to master 4... INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:211 test_clean: run the cleanAllRUV task... INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:221 test_clean: check all the masters have been cleaned... INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:85 check_ruvs for replica dc=example,dc=com:1 (suffix:rid) INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:85 check_ruvs for replica dc=example,dc=com:2 (suffix:rid) INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:85 check_ruvs for replica dc=example,dc=com:3 (suffix:rid) INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:225 test_clean PASSED, restoring master 4...
Passed suites/replication/cleanallruv_test.py::test_clean_restart 28.76
-------------------------------Captured log setup-------------------------------
DEBUG  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:153 Wait a bit before the reset - it is required for the slow machines DEBUG  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:155 -------------- BEGIN RESET of m4 ----------------- INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 67719e5b-9d82-4035-ae68-66e49cc00b9d / got description=d4306983-4068-4102-80fd-977796230506) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 7c5efa38-354b-4244-a7bd-f1246efc0216 / got description=67719e5b-9d82-4035-ae68-66e49cc00b9d) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect fe1a36ba-7742-4f4c-af35-8359f1214420 / got description=7c5efa38-354b-4244-a7bd-f1246efc0216) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working 
(expect ba3c7b64-56fa-40eb-9d00-1ebf50fb1731 / got description=fe1a36ba-7742-4f4c-af35-8359f1214420) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect ff8ba57b-521a-4269-aeea-38658dddc145 / got description=ba3c7b64-56fa-40eb-9d00-1ebf50fb1731) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 2395fab2-385b-4e53-a11f-e0319cdcfb52 / got description=ff8ba57b-521a-4269-aeea-38658dddc145) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 994b887b-8e19-49e6-a212-21a8d6e84e34 / got description=2395fab2-385b-4e53-a11f-e0319cdcfb52) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to 
ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0bf75ab5-babf-4a89-9d7a-4ceb1e5bb9bb / got description=994b887b-8e19-49e6-a212-21a8d6e84e34) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect b5cd8847-7925-4be2-bce1-4da34da2a02f / got description=0bf75ab5-babf-4a89-9d7a-4ceb1e5bb9bb) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 8b458715-12af-4a61-860b-c75b1115111c / got description=b5cd8847-7925-4be2-bce1-4da34da2a02f) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 95489c8b-9a46-439a-854d-ec5c7d1ceb78 / got description=8b458715-12af-4a61-860b-c75b1115111c) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from 
ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 9eb2b224-a7da-4a9a-984c-6dc0f8012019 / got description=95489c8b-9a46-439a-854d-ec5c7d1ceb78) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is working DEBUG  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:181 -------------- FINISH RESET of m4 -----------------
-------------------------------Captured log call--------------------------------
INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:256 Running test_clean_restart... INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:259 test_clean: disable master 4... INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:71 test_clean: remove all the agreements to master 4... INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:267 test_clean: run the cleanAllRUV task... INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:292 test_clean_restart: check all the masters have been cleaned... INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:85 check_ruvs for replica dc=example,dc=com:1 (suffix:rid) INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:85 check_ruvs for replica dc=example,dc=com:2 (suffix:rid) INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:85 check_ruvs for replica dc=example,dc=com:3 (suffix:rid) INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:296 test_clean_restart PASSED, restoring master 4...
Passed suites/replication/cleanallruv_test.py::test_clean_force 24.18
-------------------------------Captured log setup-------------------------------
DEBUG  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:153 Wait a bit before the reset - it is required for the slow machines DEBUG  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:155 -------------- BEGIN RESET of m4 ----------------- INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 6e4a3f6c-77b6-42c2-9140-9603b5f5114d / got description=a2418cff-cadb-4ffd-8c61-77e55c7a0784) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 69f45501-0d58-4d2b-8a8e-6c18fc60f5df / got description=6e4a3f6c-77b6-42c2-9140-9603b5f5114d) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect cba1d4bb-b86b-41fa-8a17-1119f1b4e50e / got description=69f45501-0d58-4d2b-8a8e-6c18fc60f5df) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working 
(expect b8c94272-7413-4f56-8445-7f15bf7ea2b7 / got description=cba1d4bb-b86b-41fa-8a17-1119f1b4e50e) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 4be7c6d1-c2ff-424e-8776-48c78310d7bd / got description=b8c94272-7413-4f56-8445-7f15bf7ea2b7) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 0932fbe3-948f-4379-9571-8c2277f5eb30 / got description=4be7c6d1-c2ff-424e-8776-48c78310d7bd) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 6f0c5ad0-a68d-4dce-b57f-29baa2ebe620 / got description=0932fbe3-948f-4379-9571-8c2277f5eb30) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to 
ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect b6d8e662-42f9-45ec-aef3-26d4a0a3c5a9 / got description=6f0c5ad0-a68d-4dce-b57f-29baa2ebe620) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect e84345e4-f4da-471c-a29a-242cfcaa4105 / got description=b6d8e662-42f9-45ec-aef3-26d4a0a3c5a9) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect b15955e1-5069-4b92-ab27-96da995587bd / got description=e84345e4-f4da-471c-a29a-242cfcaa4105) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 845ebe94-b52e-43a2-8d50-692bad56b9d9 / got description=b15955e1-5069-4b92-ab27-96da995587bd) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from 
ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect a9db59a6-dc44-4897-a316-16deb02827e5 / got description=845ebe94-b52e-43a2-8d50-692bad56b9d9) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is working DEBUG  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:181 -------------- FINISH RESET of m4 -----------------
-------------------------------Captured log call--------------------------------
INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:322 Running test_clean_force... INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:71 test_clean_force: remove all the agreements to master 4... INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:340 test_clean: run the cleanAllRUV task... INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:350 test_clean_force: check all the masters have been cleaned... INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:85 check_ruvs for replica dc=example,dc=com:1 (suffix:rid) INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:85 check_ruvs for replica dc=example,dc=com:2 (suffix:rid) INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:85 check_ruvs for replica dc=example,dc=com:3 (suffix:rid) INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:354 test_clean_force PASSED, restoring master 4...
Passed suites/replication/cleanallruv_test.py::test_abort 7.73
-------------------------------Captured log setup-------------------------------
DEBUG  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:153 Wait a bit before the reset - it is required for the slow machines DEBUG  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:155 -------------- BEGIN RESET of m4 ----------------- INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 093eabf2-4c66-43d2-a7d8-7a7183ae767b / got description=2f2e515a-d2a3-4c23-ba02-afab7296fd01) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect e9c17147-a4b3-4190-8f1d-9eedf6e58e05 / got description=093eabf2-4c66-43d2-a7d8-7a7183ae767b) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 849dc9b8-7904-4d86-a371-974e28493287 / got description=e9c17147-a4b3-4190-8f1d-9eedf6e58e05) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working 
(expect 98edfd77-9d51-4b07-a1f8-cfeb72ea3fb5 / got description=849dc9b8-7904-4d86-a371-974e28493287) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 030258eb-bee0-4d4f-8ca8-ae0f7bc9513e / got description=98edfd77-9d51-4b07-a1f8-cfeb72ea3fb5) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 5e37f8b2-891f-4654-a98b-2bec03ae0345 / got description=030258eb-bee0-4d4f-8ca8-ae0f7bc9513e) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect ae5533b2-e432-4732-8d0b-8335885c423d / got description=5e37f8b2-891f-4654-a98b-2bec03ae0345) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to 
ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 7c2cca5f-39ee-43e1-9fe3-096ae2356c25 / got description=ae5533b2-e432-4732-8d0b-8335885c423d) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 0d6e224d-88a9-43ba-b8e1-b6839273d6d1 / got description=7c2cca5f-39ee-43e1-9fe3-096ae2356c25) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 2ae30cf5-8472-4d1d-a49e-68eb0fbf763e / got description=0d6e224d-88a9-43ba-b8e1-b6839273d6d1) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 66b615e7-7fd1-4c94-86c0-d6deecffc902 / got description=2ae30cf5-8472-4d1d-a49e-68eb0fbf763e) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from 
ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 6a0fc08e-5ada-4a90-a796-5f815494d3a3 / got description=66b615e7-7fd1-4c94-86c0-d6deecffc902) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is working DEBUG  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:181 -------------- FINISH RESET of m4 -----------------
-------------------------------Captured log call--------------------------------
INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:376 Running test_abort... INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:71 test_abort: remove all the agreements to master 4... INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:381 test_abort: stop master 2 to freeze the cleanAllRUV task... INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:385 test_abort: add the cleanAllRUV task... INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:400 test_abort: check master 1 no longer has a cleanAllRUV task... INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:406 test_abort: start master 2 to begin the restore process... INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:409 test_abort PASSED, restoring master 4...
Passed suites/replication/cleanallruv_test.py::test_abort_restart 27.47
-------------------------------Captured log setup-------------------------------
DEBUG  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:153 Wait a bit before the reset - it is required for the slow machines DEBUG  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:155 -------------- BEGIN RESET of m4 ----------------- INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect e08a21d0-2b10-41db-a2a8-fc323589c153 / got description=b1326c73-257c-4bc1-95cd-7018a85fc278) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 4022a34e-9150-4541-9865-311a8ad0fc9f / got description=e08a21d0-2b10-41db-a2a8-fc323589c153) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 6ed7975d-43a3-44be-8ff1-61e5fa13cc94 / got description=4022a34e-9150-4541-9865-311a8ad0fc9f) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working 
(expect ce815c77-a42a-4b27-81fe-69589227ed4d / got description=6ed7975d-43a3-44be-8ff1-61e5fa13cc94) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect a2a2913f-0b15-4fd9-be3b-938e7c438cb4 / got description=ce815c77-a42a-4b27-81fe-69589227ed4d) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 3755639c-d6a1-4ee7-9b40-53cd270c25bf / got description=a2a2913f-0b15-4fd9-be3b-938e7c438cb4) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 6eda826d-939d-4748-b720-a275c0739e40 / got description=3755639c-d6a1-4ee7-9b40-53cd270c25bf) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to 
ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 53e67690-03c2-4414-ae5d-8e10bb46eeae / got description=6eda826d-939d-4748-b720-a275c0739e40) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 86dbc8dd-df46-46ca-a5c9-923ddb66aa62 / got description=53e67690-03c2-4414-ae5d-8e10bb46eeae) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect a0e924e4-5813-44d3-bfbf-85f251405dac / got description=86dbc8dd-df46-46ca-a5c9-923ddb66aa62) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 83d0826c-3234-4c69-a3d3-9fdb7751dfe9 / got description=a0e924e4-5813-44d3-bfbf-85f251405dac) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from 
ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 10c079d3-a647-493c-bce7-f21e92c26173 / got description=83d0826c-3234-4c69-a3d3-9fdb7751dfe9) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is working DEBUG  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:181 -------------- FINISH RESET of m4 -----------------
-------------------------------Captured log call--------------------------------
INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:441 Running test_abort_restart... INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:71 test_abort: remove all the agreements to master 4... INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:446 test_abort_restart: stop master 3 to freeze the cleanAllRUV task... INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:450 test_abort_restart: add the cleanAllRUV task... INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:465 test_abort_abort: check master 1 no longer has a cleanAllRUV task... INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:487 test_abort_restart PASSED, restoring master 4...
Passed suites/replication/cleanallruv_test.py::test_abort_certify 36.81
-------------------------------Captured log setup-------------------------------
DEBUG  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:153 Wait a bit before the reset - it is required for the slow machines DEBUG  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:155 -------------- BEGIN RESET of m4 ----------------- INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 2fd8d3e8-74ec-476d-8bdb-0bd2ccb683a9 / got description=aa01ebab-c118-4256-94c1-73904c2de214) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect b5f19583-edf3-421a-96f7-414c43ad2b82 / got description=2fd8d3e8-74ec-476d-8bdb-0bd2ccb683a9) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 548ee202-7d67-4d7e-bcbc-ba6f9b2b9874 / got description=b5f19583-edf3-421a-96f7-414c43ad2b82) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working 
(expect ed17fb8f-334e-4e89-b2d5-f37de6d660a7 / got description=548ee202-7d67-4d7e-bcbc-ba6f9b2b9874) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect ed17fb8f-334e-4e89-b2d5-f37de6d660a7 / got description=548ee202-7d67-4d7e-bcbc-ba6f9b2b9874) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect ed17fb8f-334e-4e89-b2d5-f37de6d660a7 / got description=548ee202-7d67-4d7e-bcbc-ba6f9b2b9874) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 2a5520f0-d5bb-4571-b1ff-45f147c82ef3 / got description=ed17fb8f-334e-4e89-b2d5-f37de6d660a7) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect a091540f-75df-4666-ada6-34bb8ffb0ad2 / got description=2a5520f0-d5bb-4571-b1ff-45f147c82ef3) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from 
ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect e406e7d9-9fd1-4fa3-a2d1-b46e299af779 / got description=a091540f-75df-4666-ada6-34bb8ffb0ad2) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 47f1e379-bf51-4578-af80-bb07ebbad48e / got description=e406e7d9-9fd1-4fa3-a2d1-b46e299af779) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 48aa8e43-37b6-4978-913c-a2e9ab0d6b60 / got description=47f1e379-bf51-4578-af80-bb07ebbad48e) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 908d8f67-6342-4533-9ff8-5db15b669647 / got description=48aa8e43-37b6-4978-913c-a2e9ab0d6b60) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  
lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 7237b58f-bcfe-4791-8154-9f7d73515193 / got description=908d8f67-6342-4533-9ff8-5db15b669647) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect bead0f9a-6b37-4d5f-8c3d-4f6047a7ad3b / got description=7237b58f-bcfe-4791-8154-9f7d73515193) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is working DEBUG  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:181 -------------- FINISH RESET of m4 -----------------
-------------------------------Captured log call--------------------------------
INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:509 Running test_abort_certify... INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:71 test_abort_certify: remove all the agreements to master 4... INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:515 test_abort_certify: stop master 2 to freeze the cleanAllRUV task... INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:519 test_abort_certify: add the cleanAllRUV task... INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:531 test_abort_certify: abort the cleanAllRUV task... INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:535 test_abort_certify... INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:542 test_abort_certify: start master 2 to allow the abort task to finish... INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:551 test_abort_certify: check master 1 no longer has a cleanAllRUV task... INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:556 test_abort_certify PASSED, restoring master 4...
Passed suites/replication/cleanallruv_test.py::test_stress_clean 47.49
-------------------------------Captured log setup-------------------------------
DEBUG  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:153 Wait a bit before the reset - it is required for the slow machines DEBUG  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:155 -------------- BEGIN RESET of m4 ----------------- INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 6078bd82-7cbe-4bb5-91cd-c725d2e6f047 / got description=c282dccb-4116-4b20-9117-c6c6f3b66c1f) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 8201830d-5d86-4efa-b1cd-60dfc7bff15b / got description=6078bd82-7cbe-4bb5-91cd-c725d2e6f047) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 0314f931-125e-4559-a48f-eed9852f8fec / got description=8201830d-5d86-4efa-b1cd-60dfc7bff15b) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working 
(expect 2dbb1a3c-bf8f-4c04-b9b2-333bf8cf2804 / got description=0314f931-125e-4559-a48f-eed9852f8fec) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect cb959cdf-a15e-4bb5-bc36-56d3d6d07e48 / got description=2dbb1a3c-bf8f-4c04-b9b2-333bf8cf2804) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 15b7ab80-c0fa-4541-85af-2ae84fc85c6c / got description=cb959cdf-a15e-4bb5-bc36-56d3d6d07e48) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect f15021ea-1229-45e5-aed3-1b71b3ebf7a5 / got description=15b7ab80-c0fa-4541-85af-2ae84fc85c6c) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to 
ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect b1bbbdfb-9919-4a85-989c-3fdf935d54ca / got description=f15021ea-1229-45e5-aed3-1b71b3ebf7a5) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect fb80dac2-52d9-4b29-9483-458def16137a / got description=b1bbbdfb-9919-4a85-989c-3fdf935d54ca) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is working INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect dc45208b-a440-4acb-92a9-5e7876db4fde / got description=61fa67af-6543-4756-ac0e-2eee6f3641a7) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 9998f4e0-5fc1-4579-8536-0d33db700910 / got description=dc45208b-a440-4acb-92a9-5e7876db4fde) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from 
ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is working DEBUG  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:181 -------------- FINISH RESET of m4 -----------------
-------------------------------Captured log call--------------------------------
INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:580 Running test_stress_clean... INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:581 test_stress_clean: put all the masters under load... INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:598 test_stress_clean: allow some time for replication to get flowing... INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:604 test_stress_clean: allow some time for master 4 to push changes out (60 seconds)... INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:71 test_stress_clean: remove all the agreements to master 4... INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:620 test_stress_clean: wait for all the updates to finish... INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:627 test_stress_clean: check if all the replicas have been cleaned... INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:85 check_ruvs for replica dc=example,dc=com:1 (suffix:rid) INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:85 check_ruvs for replica dc=example,dc=com:2 (suffix:rid) INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:85 check_ruvs for replica dc=example,dc=com:3 (suffix:rid) INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:631 test_stress_clean: PASSED, restoring master 4... INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:634 Sleep for 120 seconds to allow replication to complete... 
INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 07a35f65-3a4c-4413-a5cd-d41cb6703fe3 / got description=9998f4e0-5fc1-4579-8536-0d33db700910) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 0699ea5f-391a-49f0-8c92-c7d0b65ca8b6 / got description=07a35f65-3a4c-4413-a5cd-d41cb6703fe3) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 364fa496-cc5a-4092-8ee6-40b5a459c152 / got description=0699ea5f-391a-49f0-8c92-c7d0b65ca8b6) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 364fa496-cc5a-4092-8ee6-40b5a459c152 / got description=0699ea5f-391a-49f0-8c92-c7d0b65ca8b6) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to 
ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 79e58fd4-0842-49b3-85ac-f827f499a2aa / got description=364fa496-cc5a-4092-8ee6-40b5a459c152) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 7926e5b2-176a-4c49-b7fc-9ce8fea81026 / got description=79e58fd4-0842-49b3-85ac-f827f499a2aa) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 53dbb770-6ece-4dc8-ae7e-26b9b3a62fc7 / got description=7926e5b2-176a-4c49-b7fc-9ce8fea81026) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working
Passed suites/replication/cleanallruv_test.py::test_multiple_tasks_with_force 47.38
-------------------------------Captured log setup-------------------------------
DEBUG  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:153 Wait a bit before the reset - it is required for the slow machines DEBUG  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:155 -------------- BEGIN RESET of m4 ----------------- INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect e3034ea0-b873-45b7-bc2b-84f249293431 / got description=9af2dbb9-a03b-4e73-8bf3-606e7b40e9b8) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 63a870bd-c635-4b82-a8d9-12116eeb255d / got description=e3034ea0-b873-45b7-bc2b-84f249293431) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect eb0cb5b3-9f6c-47a9-b148-0f1fe83e4015 / got description=63a870bd-c635-4b82-a8d9-12116eeb255d) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working 
(expect 8bb5a0bb-7f11-4775-b273-8743da5acf3b / got description=eb0cb5b3-9f6c-47a9-b148-0f1fe83e4015) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 5b1f84b7-9dcd-4e55-8767-9e19be4453ff / got description=8bb5a0bb-7f11-4775-b273-8743da5acf3b) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 1ea40ad8-1de1-4a4e-b17c-e7149f15b574 / got description=5b1f84b7-9dcd-4e55-8767-9e19be4453ff) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 8f3627e3-b4ab-4db5-ae14-ea0f25891b4e / got description=1ea40ad8-1de1-4a4e-b17c-e7149f15b574) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to 
ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 681cd436-9b49-4524-986a-ff8dd2223537 / got description=8f3627e3-b4ab-4db5-ae14-ea0f25891b4e) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 8987d49f-e84e-44fe-a8c7-1f39d4b37f95 / got description=681cd436-9b49-4524-986a-ff8dd2223537) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 3da40194-1d97-4026-a030-86cc3c6f4cf7 / got description=8987d49f-e84e-44fe-a8c7-1f39d4b37f95) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 54fdbe32-fffb-4ec9-a71f-a7bd2733db8c / got description=3da40194-1d97-4026-a030-86cc3c6f4cf7) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from 
ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 09470e40-9a0d-434c-a5e9-b09db9809b76 / got description=54fdbe32-fffb-4ec9-a71f-a7bd2733db8c) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is working DEBUG  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:181 -------------- FINISH RESET of m4 -----------------
-------------------------------Captured log call--------------------------------
INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:671 Running test_multiple_tasks_with_force... INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:71 test_multiple_tasks_with_force: remove all the agreements to master 4... INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:690 test_multiple_tasks_with_force: run the cleanAllRUV task with "force" on... INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:699 test_multiple_tasks_with_force: run the cleanAllRUV task with "force" off... INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:718 test_multiple_tasks_with_force: check all the masters have been cleaned... INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:85 check_ruvs for replica dc=example,dc=com:1 (suffix:rid) INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:85 check_ruvs for replica dc=example,dc=com:2 (suffix:rid) INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:85 check_ruvs for replica dc=example,dc=com:3 (suffix:rid) INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:722 test_abort: check master 1 no longer has a cleanAllRUV task...
Passed suites/replication/cleanallruv_test.py::test_clean_shutdown_crash 34.29
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect eaf86cc0-30a3-42a9-a442-c8ff08c1fdbb / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working 
(expect 33f58873-46c4-4ce6-be08-6116367198ae / got description=eaf86cc0-30a3-42a9-a442-c8ff08c1fdbb) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists
-------------------------------Captured log call--------------------------------
INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:771 Enabling TLS INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:774 Creating replication dns INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:782 Changing auth type INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:803 Stopping master2 INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:806 Run the cleanAllRUV task INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:817 Check if master1 crashed INFO  tests.suites.replication.cleanallruv_test:cleanallruv_test.py:820 Repeat
Passed suites/replication/conflict_resolve_test.py::TestTwoMasters::test_add_modrdn 17.29
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c97eb642-d99b-4493-b8b0-ae69d9339d29 / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working 
(expect c8cd7a0e-1b5d-43e7-8737-a9d906981f75 / got description=c97eb642-d99b-4493-b8b0-ae69d9339d29) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists
-------------------------------Captured log call--------------------------------
INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect e47dd38e-7dc7-4244-ab4f-31e444775d1a / got description=c8cd7a0e-1b5d-43e7-8737-a9d906981f75) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect e47dd38e-7dc7-4244-ab4f-31e444775d1a / got description=c8cd7a0e-1b5d-43e7-8737-a9d906981f75) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect e47dd38e-7dc7-4244-ab4f-31e444775d1a / got description=c8cd7a0e-1b5d-43e7-8737-a9d906981f75) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:210 Test create - modrdn INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 5dba3fdb-2344-4a22-ab03-f9b157abae2e / got description=e47dd38e-7dc7-4244-ab4f-31e444775d1a) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 5dba3fdb-2344-4a22-ab03-f9b157abae2e / got description=e47dd38e-7dc7-4244-ab4f-31e444775d1a) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to 
ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 3d1bde72-2f0d-42c6-ba82-880f124685c6 / got description=5dba3fdb-2344-4a22-ab03-f9b157abae2e) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working
Passed suites/replication/conflict_resolve_test.py::TestTwoMasters::test_conflict_attribute_multi_valued 10.78
-------------------------------Captured log call--------------------------------
INFO  tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:833 Check foo1 is on M1 INFO  tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:833 Check foo2 is on M1 INFO  tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:839 Check foo1 is on M1 INFO  tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:839 Check foo2 is on M1 INFO  tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:844 Check M1.uid foo1 is also on M2 INFO  tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:844 Check M1.uid foo2 is also on M2 INFO  tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:848 Check M2.uid foo1 is also on M1 INFO  tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:848 Check M2.uid foo2 is also on M1
Passed suites/replication/conflict_resolve_test.py::TestTwoMasters::test_conflict_attribute_single_valued 9.69
-------------------------------Captured log call--------------------------------
INFO  tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:921 Check foo1 is on M1 INFO  tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:927 Check foo1 is on M2 INFO  tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:932 Check M1.uid foo1 is also on M2 INFO  tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:936 Check M2.uid foo1 is also on M1
Passed suites/replication/encryption_cl5_test.py::test_algorithm_unhashed 44.30
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 8d1ec0f9-5bf1-4aea-b616-d2ba81ddc8d8 / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working 
(expect 31d013a6-7f59-44a7-9718-979ef748efac / got description=8d1ec0f9-5bf1-4aea-b616-d2ba81ddc8d8) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists INFO  lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63702 is NOT working (expect be3ce308-33ef-47cf-9f74-3c12df9b8fdc / got description=31d013a6-7f59-44a7-9718-979ef748efac) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63702 is working
-------------------------------Captured log call--------------------------------
INFO  tests.suites.replication.encryption_cl5_test:encryption_cl5_test.py:47 Configuring changelog encryption:master1 for: AES INFO  tests.suites.replication.encryption_cl5_test:encryption_cl5_test.py:64 Running dbscan -f to check unhashed#user#password attr INFO  lib389:__init__.py:3014 Running script: ['/usr/bin/dbscan', '-f', '/var/lib/dirsrv/slapd-master1/db/userRoot/replication_changelog.db'] INFO  tests.suites.replication.encryption_cl5_test:encryption_cl5_test.py:64 Running dbscan -f to check unhashed#user#password attr INFO  lib389:__init__.py:3014 Running script: ['/usr/bin/dbscan', '-f', '/var/lib/dirsrv/slapd-master2/db/userRoot/replication_changelog.db'] INFO  tests.suites.replication.encryption_cl5_test:encryption_cl5_test.py:64 Running dbscan -f to check unhashed#user#password attr INFO  lib389:__init__.py:3014 Running script: ['/usr/bin/dbscan', '-f', '/var/lib/dirsrv/slapd-master1/db/userRoot/replication_changelog.db'] INFO  tests.suites.replication.encryption_cl5_test:encryption_cl5_test.py:64 Running dbscan -f to check unhashed#user#password attr INFO  lib389:__init__.py:3014 Running script: ['/usr/bin/dbscan', '-f', '/var/lib/dirsrv/slapd-master2/db/userRoot/replication_changelog.db'] INFO  tests.suites.replication.encryption_cl5_test:encryption_cl5_test.py:64 Running dbscan -f to check unhashed#user#password attr INFO  lib389:__init__.py:3014 Running script: ['/usr/bin/dbscan', '-f', '/var/lib/dirsrv/slapd-master1/db/userRoot/replication_changelog.db'] INFO  tests.suites.replication.encryption_cl5_test:encryption_cl5_test.py:64 Running dbscan -f to check unhashed#user#password attr INFO  lib389:__init__.py:3014 Running script: ['/usr/bin/dbscan', '-f', '/var/lib/dirsrv/slapd-master2/db/userRoot/replication_changelog.db'] INFO  tests.suites.replication.encryption_cl5_test:encryption_cl5_test.py:64 Running dbscan -f to check unhashed#user#password attr INFO  lib389:__init__.py:3014 Running script: ['/usr/bin/dbscan', '-f', 
'/var/lib/dirsrv/slapd-master1/db/userRoot/replication_changelog.db'] INFO  tests.suites.replication.encryption_cl5_test:encryption_cl5_test.py:64 Running dbscan -f to check unhashed#user#password attr INFO  lib389:__init__.py:3014 Running script: ['/usr/bin/dbscan', '-f', '/var/lib/dirsrv/slapd-master2/db/userRoot/replication_changelog.db']
Passed suites/replication/multiple_changelogs_test.py::test_multiple_changelogs 12.21
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for consumer1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39201, 'ldap-secureport': 63901, 'server-id': 'consumer1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:169 Joining consumer consumer1 from master1 ... INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect b08a188c-7eef-4e15-9e70-150cb4d206ef / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is working INFO  lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 INFO  lib389.topologies:topologies.py:174 Ensuring consumer consumer1 from master1 ... 
INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 already exists
-------------------------------Captured log call--------------------------------
INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect ab2a24c4-bad9-4e4b-a7b8-6e9296af932b / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is working INFO  lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201
Passed suites/replication/multiple_changelogs_test.py::test_multiple_changelogs_export_import 10.02
No log output captured.
Passed suites/replication/regression_test.py::test_special_symbol_replica_agreement 1.03
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38902, 'ldap-secureport': 63602, 'server-id': 'standalone2', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  lib389:backend.py:80 List backend with suffix=dc=example,dc=com INFO  lib389:__init__.py:1713 Found entry dn: cn=replication manager,cn=config cn: bind dn pseudo user cn: replication manager objectClass: top objectClass: person sn: bind dn pseudo user userPassword: {PBKDF2_SHA256}AAAIADIpf4qIObO2PlQ0/++frOKGqK1mAC+baEjQn91V8n5MVbOFo8K9RCAkUKfrvaa39cKf06x3gIekaTsFQFlaQIDZzaGc9/4lSd8o2u/9PRR4qJsLwZTOO5WVwffGCqTeqi9uIMYjdgwJdeY0SNCNe/3iLYinFFagkdAUDzfwBrFnLCncZhBsST4vA9k21iHbLr8bE3MH07IkuyERxlT19WbtSDY2O8I4zXXcS2FsBedgpbp/akLDvXqhO7IJ8fW/8K5YdbXLA3q4oGu3opuJBZ3cGKfOIXph3CDbkQO6WT/FZAPT0Qu1RSF8AAURzxys92Y+6lGiTU/cqZOAM4hEVPz2qDxU92hFhICJniK7YVPUTGmdJivdZrJjyMFcjd+TqF/DG4oTOy+ijzw0h+g5AZtYshca2p9xPQpsTMAekY+4 INFO  lib389:agreement.py:1169 Starting total init cn=-3meTo_ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:38902,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
Passed suites/replication/regression_test.py::test_double_delete 2.33
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect fdc5f744-b755-4d47-93d2-d72210bb3665 / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working 
(expect 440aa0d4-6149-4288-9b38-637d617f3a17 / got description=fdc5f744-b755-4d47-93d2-d72210bb3665) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists INFO  tests.suites.replication.regression_test:regression_test.py:141 Adding a test entry user
-------------------------------Captured log call--------------------------------
INFO  tests.suites.replication.regression_test:regression_test.py:268 Deleting entry uid=testuser,ou=People,dc=example,dc=com from master1 INFO  tests.suites.replication.regression_test:regression_test.py:271 Deleting entry uid=testuser,ou=People,dc=example,dc=com from master2 INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect a5689c8f-b1fc-4c6e-b518-e09438cd06e4 / got description=440aa0d4-6149-4288-9b38-637d617f3a17) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 48d10e82-8ea5-40b7-bc3d-f697428e078f / got description=a5689c8f-b1fc-4c6e-b518-e09438cd06e4) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working
Passed suites/replication/regression_test.py::test_repl_modrdn 6.46
-------------------------------Captured log call--------------------------------
INFO  tests.suites.replication.regression_test:regression_test.py:314 Add test entries - Add 3 OUs and 2 same users under 2 different OUs INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect eeb665cd-43d3-4446-87b9-3ce63f993a94 / got description=48d10e82-8ea5-40b7-bc3d-f697428e078f) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect a8a80feb-8827-47be-9f91-6e9be3acb908 / got description=eeb665cd-43d3-4446-87b9-3ce63f993a94) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  tests.suites.replication.regression_test:regression_test.py:338 Stop Replication INFO  tests.suites.replication.regression_test:regression_test.py:341 Apply modrdn to M1 - move test user from OU A -> C INFO  tests.suites.replication.regression_test:regression_test.py:344 Apply modrdn on M2 - move test user from OU B -> C INFO  tests.suites.replication.regression_test:regression_test.py:347 Start Replication INFO  tests.suites.replication.regression_test:regression_test.py:350 Wait for sometime for repl to resume INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 9ecd104f-f04e-4aac-be96-f8528f7fc047 / got description=a8a80feb-8827-47be-9f91-6e9be3acb908) INFO  
lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect cd3ad460-a65f-4c9b-b467-fec34f01d079 / got description=9ecd104f-f04e-4aac-be96-f8528f7fc047) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  tests.suites.replication.regression_test:regression_test.py:354 Check that there should be only one test entry under ou=C on both masters INFO  tests.suites.replication.regression_test:regression_test.py:361 Check that the replication is working fine both ways, M1 <-> M2 INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 91b491cc-7d1a-4105-b1d1-1dd0d4285d58 / got description=cd3ad460-a65f-4c9b-b467-fec34f01d079) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect f8c7bad7-d567-4199-8ba7-cc0af1a0d98b / got description=91b491cc-7d1a-4105-b1d1-1dd0d4285d58) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is 
working
Passed suites/replication/regression_test.py::test_password_repl_error 14.00
-------------------------------Captured log setup-------------------------------
INFO  tests.suites.replication.regression_test:regression_test.py:141 Adding a test entry user
-------------------------------Captured log call--------------------------------
INFO  tests.suites.replication.regression_test:regression_test.py:385 Clean the error log INFO  tests.suites.replication.regression_test:regression_test.py:388 Set replication loglevel INFO  tests.suites.replication.regression_test:regression_test.py:391 Modifying entry uid=testuser,ou=People,dc=example,dc=com - change userpassword on master 1 INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 6eba6f64-8303-4e01-9aef-7244e292b289 / got description=f8c7bad7-d567-4199-8ba7-cc0af1a0d98b) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  tests.suites.replication.regression_test:regression_test.py:398 Restart the servers to flush the logs INFO  tests.suites.replication.regression_test:regression_test.py:403 Check that password works on master 2 INFO  tests.suites.replication.regression_test:regression_test.py:407 Check the error log for the error with uid=testuser,ou=People,dc=example,dc=com INFO  tests.suites.replication.regression_test:regression_test.py:410 Set the default loglevel
Passed suites/replication/regression_test.py::test_invalid_agmt 2.05
-------------------------------Captured log call--------------------------------
INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect e7d3e318-fbc5-4a7a-83df-865c806ba1e6 / got description=6eba6f64-8303-4e01-9aef-7244e292b289) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect dccb733c-3bbd-4710-a2c3-5aa77b2ccb3a / got description=e7d3e318-fbc5-4a7a-83df-865c806ba1e6) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working
Passed suites/replication/regression_test.py::test_fetch_bindDnGroup 20.18
-------------------------------Captured log call--------------------------------
INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 69aaa4be-923d-42de-af0c-2cb9b27da132 / got description=dccb733c-3bbd-4710-a2c3-5aa77b2ccb3a) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working
Passed suites/replication/regression_test.py::test_plugin_bind_dn_tracking_and_replication 0.76
No log output captured.
Passed suites/replication/regression_test.py::test_cleanallruv_repl 116.63
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master3 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39003, 'ldap-secureport': 63703, 'server-id': 'master3', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... 
INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c2e31a98-a582-4501-a8ef-0d364264adfb / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect bfd6fbfa-1ac6-4a4d-b57a-c56f990ecc67 / got description=c2e31a98-a582-4501-a8ef-0d364264adfb) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:156 Joining master master3 to master1 ... 
INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 31c020ea-f790-4401-9d07-7997c42f3125 / got description=bfd6fbfa-1ac6-4a4d-b57a-c56f990ecc67) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 7ad64b85-8e1c-4532-ac35-83b3a87c3f8a / got description=31c020ea-f790-4401-9d07-7997c42f3125) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... 
INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master3 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master3 ... INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is was created INFO  lib389.topologies:topologies.py:164 Ensuring master master3 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master3 to master2 ... INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created
-------------------------------Captured log call--------------------------------
INFO  tests.suites.replication.regression_test:regression_test.py:680 Change the error log levels for all masters INFO  tests.suites.replication.regression_test:regression_test.py:684 Get the replication agreements for all 3 masters INFO  tests.suites.replication.regression_test:regression_test.py:689 Modify nsslapd-changelogmaxage=30 and nsslapd-changelogtrim-interval=5 for M1 and M2 INFO  tests.suites.replication.regression_test:regression_test.py:712 Add test users to 3 masters INFO  lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:tasks.py:1400 cleanAllRUV task (task-10282020_231411) completed successfully INFO  lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
Passed suites/replication/regression_test.py::test_online_reinit_may_hang 18.42
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 4342da54-6d0a-4b65-a06d-687cb98ee36d / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working 
(expect 14a1db17-a563-42ce-9e0f-21558acdf396 / got description=4342da54-6d0a-4b65-a06d-687cb98ee36d) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists
------------------------------Captured stderr call------------------------------
ldiffile: /var/lib/dirsrv/slapd-master1/ldif/master1.ldif
-------------------------------Captured log call--------------------------------
INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 3c1cead4-324b-44fb-8c84-917cd42c4223 / got description=14a1db17-a563-42ce-9e0f-21558acdf396) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 2536bceb-17c1-43c8-a1c8-cac0a902aeba / got description=3c1cead4-324b-44fb-8c84-917cd42c4223) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working
Passed suites/replication/regression_test.py::test_moving_entry_make_online_init_fail 1.43
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 2cd778dd-fa9b-4cb7-b5ca-e74dab602f67 / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working 
(expect a7e954c8-278a-4b64-977e-9be5f8f7d33d / got description=2cd778dd-fa9b-4cb7-b5ca-e74dab602f67) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists
-------------------------------Captured log call--------------------------------
INFO  tests.suites.replication.regression_test:regression_test.py:862 Generating DIT_0 INFO  tests.suites.replication.regression_test:regression_test.py:865 Created entry: ou=OU0, dc=example, dc=com INFO  tests.suites.replication.regression_test:regression_test.py:870 Created entry: ou=OU0, ou=OU0, dc=example, dc=com INFO  tests.suites.replication.regression_test:regression_test.py:191 Create password policy for subtree ou=OU0,dc=example,dc=com INFO  tests.suites.replication.regression_test:regression_test.py:879 Turning tuser0 into a tombstone entry INFO  tests.suites.replication.regression_test:regression_test.py:879 Turning tuser2 into a tombstone entry INFO  tests.suites.replication.regression_test:regression_test.py:879 Turning tuser4 into a tombstone entry INFO  tests.suites.replication.regression_test:regression_test.py:879 Turning tuser6 into a tombstone entry INFO  tests.suites.replication.regression_test:regression_test.py:879 Turning tuser8 into a tombstone entry INFO  tests.suites.replication.regression_test:regression_test.py:882 dc=example,dc=com => ou=OU0,dc=example,dc=com => ou=OU0,ou=OU0,dc=example,dc=com => 10 USERS INFO  tests.suites.replication.regression_test:regression_test.py:884 Generating DIT_1 INFO  tests.suites.replication.regression_test:regression_test.py:887 Created entry: ou=OU1,dc=example,dc=com INFO  tests.suites.replication.regression_test:regression_test.py:891 Created entry: ou=OU1, ou=OU1, dc=example, dc=com INFO  tests.suites.replication.regression_test:regression_test.py:191 Create password policy for subtree ou=OU1,dc=example,dc=com INFO  tests.suites.replication.regression_test:regression_test.py:895 Moving ou=OU0,ou=OU0,dc=example,dc=com to DIT_1 INFO  tests.suites.replication.regression_test:regression_test.py:898 Moving ou=OU0,dc=example,dc=com to DIT_1 INFO  tests.suites.replication.regression_test:regression_test.py:905 Moving USERS to ou=OU0,ou=OU0,ou=OU1,dc=example,dc=com INFO  
tests.suites.replication.regression_test:regression_test.py:910 dc=example,dc=com => ou=OU1,dc=example,dc=com => ou=OU0,ou=OU1,dc=example,dc=com => ou=OU0,ou=OU0,ou=OU1,dc=example,dc=com => 10 USERS INFO  tests.suites.replication.regression_test:regression_test.py:912 Run Initialization. INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect b84bcd0e-9196-4e0f-9b2d-eb01d4bad99f / got description=a7e954c8-278a-4b64-977e-9be5f8f7d33d) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  tests.suites.replication.regression_test:regression_test.py:921 m1entry count - 46 INFO  tests.suites.replication.regression_test:regression_test.py:922 m2entry count - 46
Passed suites/replication/repl_agmt_bootstrap_test.py::test_repl_agmt_bootstrap_credentials 15.63
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 2e57f4a4-1891-4ec4-97b6-d5d87f1fd260 / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working 
(expect b0a6f90e-3f98-4b8c-8b99-e1126e4dc426 / got description=2e57f4a4-1891-4ec4-97b6-d5d87f1fd260) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists
Passed suites/replication/replica_config_test.py::test_replica_num_add[nsDS5ReplicaType--1-4-9999999999999999999999999999999999999999999999999999999999999999999-invalid-1] 0.76
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/replication/replica_config_test.py::test_replica_num_add[nsDS5Flags--1-2-9999999999999999999999999999999999999999999999999999999999999999999-invalid-1] 0.10
No log output captured.
Passed suites/replication/replica_config_test.py::test_replica_num_add[nsDS5ReplicaId-0-65536-9999999999999999999999999999999999999999999999999999999999999999999-invalid-1] 0.11
No log output captured.
Passed suites/replication/replica_config_test.py::test_replica_num_add[nsds5ReplicaPurgeDelay--2-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-1] 0.11
No log output captured.
Passed suites/replication/replica_config_test.py::test_replica_num_add[nsDS5ReplicaBindDnGroupCheckInterval--2-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-1] 0.11
No log output captured.
Passed suites/replication/replica_config_test.py::test_replica_num_add[nsds5ReplicaTombstonePurgeInterval--2-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-1] 0.09
No log output captured.
Passed suites/replication/replica_config_test.py::test_replica_num_add[nsds5ReplicaProtocolTimeout--1-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-1] 0.10
No log output captured.
Passed suites/replication/replica_config_test.py::test_replica_num_add[nsds5ReplicaReleaseTimeout--1-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-1] 0.09
No log output captured.
Passed suites/replication/replica_config_test.py::test_replica_num_add[nsds5ReplicaBackoffMin-0-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-3] 0.09
No log output captured.
Passed suites/replication/replica_config_test.py::test_replica_num_add[nsds5ReplicaBackoffMax-0-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-6] 0.08
No log output captured.
Passed suites/replication/replica_config_test.py::test_replica_num_modify[nsDS5Flags--1-2-9999999999999999999999999999999999999999999999999999999999999999999-invalid-1] 0.08
No log output captured.
Passed suites/replication/replica_config_test.py::test_replica_num_modify[nsds5ReplicaPurgeDelay--2-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-1] 0.08
No log output captured.
Passed suites/replication/replica_config_test.py::test_replica_num_modify[nsDS5ReplicaBindDnGroupCheckInterval--2-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-1] 0.07
No log output captured.
Passed suites/replication/replica_config_test.py::test_replica_num_modify[nsds5ReplicaTombstonePurgeInterval--2-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-1] 0.07
No log output captured.
Passed suites/replication/replica_config_test.py::test_replica_num_modify[nsds5ReplicaProtocolTimeout--1-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-1] 0.07
No log output captured.
Passed suites/replication/replica_config_test.py::test_replica_num_modify[nsds5ReplicaReleaseTimeout--1-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-1] 0.07
No log output captured.
Passed suites/replication/replica_config_test.py::test_replica_num_modify[nsds5ReplicaBackoffMin-0-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-3] 0.07
No log output captured.
Passed suites/replication/replica_config_test.py::test_replica_num_modify[nsds5ReplicaBackoffMax-0-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-6] 0.07
No log output captured.
Passed suites/replication/replica_config_test.py::test_same_attr_yields_same_return_code 0.19
No log output captured.
Passed suites/replication/ruvstore_test.py::test_ruv_entry_backup 6.67
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect d6da2a5c-e8e3-40bc-90f4-c57aa388598d / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working 
(expect 62fb8b20-e8cc-4fd1-a2f2-3948893659f6 / got description=d6da2a5c-e8e3-40bc-90f4-c57aa388598d) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists
------------------------------Captured stderr call------------------------------
ldiffile: /var/lib/dirsrv/slapd-master1/ldif/master1.ldif
-------------------------------Captured log call--------------------------------
INFO  tests.suites.replication.ruvstore_test:ruvstore_test.py:102 LDAP operations add, modify, modrdn and delete INFO  tests.suites.replication.ruvstore_test:ruvstore_test.py:58 Adding user to master1 INFO  tests.suites.replication.ruvstore_test:ruvstore_test.py:61 Modify RDN of user: uid=rep2lusr,ou=People,dc=example,dc=com INFO  tests.suites.replication.ruvstore_test:ruvstore_test.py:68 Deleting user: uid=ruvusr,ou=people,dc=example,dc=com INFO  tests.suites.replication.ruvstore_test:ruvstore_test.py:106 Stopping the server instance to run db2ldif task to create backup file INFO  tests.suites.replication.ruvstore_test:ruvstore_test.py:110 Starting the server after backup INFO  tests.suites.replication.ruvstore_test:ruvstore_test.py:113 Checking if backup file contains RUV and required attributes INFO  tests.suites.replication.ruvstore_test:ruvstore_test.py:51 Attribute found in RUV: objectClass INFO  tests.suites.replication.ruvstore_test:ruvstore_test.py:51 Attribute found in RUV: nsUniqueId INFO  tests.suites.replication.ruvstore_test:ruvstore_test.py:51 Attribute found in RUV: nsds50ruv INFO  tests.suites.replication.ruvstore_test:ruvstore_test.py:51 Attribute found in RUV: nsruvReplicaLastModified
Passed suites/replication/series_of_repl_bugs_test.py::test_deletions_are_not_replicated 8.76
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect e1f6d669-27a2-4534-a6a6-625f4ef6e218 / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working 
(expect ad1d8389-7e4e-4a93-86e7-b0d51555464a / got description=e1f6d669-27a2-4534-a6a6-625f4ef6e218) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists
-------------------------------Captured log call--------------------------------
INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect b77093fc-48e1-4b12-bd4d-b4e682a4d041 / got description=ad1d8389-7e4e-4a93-86e7-b0d51555464a) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working
Passed suites/replication/series_of_repl_bugs_test.py::test_error_20 1.03
-------------------------------Captured log call--------------------------------
INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 78a55c1e-d83d-4d85-b722-52d772403537 / got description=157d3c83-dcec-41b3-bd25-6afce1f8865b) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working
Passed suites/replication/series_of_repl_bugs_test.py::test_segfaults 0.04
No log output captured.
Passed suites/replication/series_of_repl_bugs_test.py::test_adding_deleting 0.19
No log output captured.
Passed suites/replication/series_of_repl_bugs_test.py::test_deleting_twice 2.40
-------------------------------Captured log call--------------------------------
INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0d0681e9-0a3d-4d33-9721-8910a362f90b / got description=78a55c1e-d83d-4d85-b722-52d772403537) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 422dae97-e8cc-4276-a95b-54e01cd14ef5 / got description=0d0681e9-0a3d-4d33-9721-8910a362f90b) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working
Passed suites/replication/series_of_repl_bugs_test.py::test_rename_entry 2.29
-------------------------------Captured log call--------------------------------
INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 46639fd9-d2cd-4839-8425-c5beeb06d10b / got description=422dae97-e8cc-4276-a95b-54e01cd14ef5) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 8c8546d8-094a-4b96-afcb-dfd3fdb91c4e / got description=46639fd9-d2cd-4839-8425-c5beeb06d10b) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working
Passed suites/replication/series_of_repl_bugs_test.py::test_userpassword_attribute 2.12
-------------------------------Captured log call--------------------------------
INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 6f1bd65e-581a-4672-a304-2dca90ea6ca7 / got description=8c8546d8-094a-4b96-afcb-dfd3fdb91c4e) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working
Passed suites/replication/series_of_repl_bugs_test.py::test_tombstone_modrdn 0.08
No log output captured.
Passed suites/replication/single_master_test.py::test_mail_attr_repl 14.07
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for consumer1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39201, 'ldap-secureport': 63901, 'server-id': 'consumer1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:169 Joining consumer consumer1 from master1 ... INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 39ac9778-79c6-43cc-aa4b-1043b81a180e / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is working INFO  lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 INFO  lib389.topologies:topologies.py:174 Ensuring consumer consumer1 from master1 ... 
INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 already exists
-------------------------------Captured log call--------------------------------
INFO  tests.suites.replication.single_master_test:single_master_test.py:67 Check that replication is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 8fb723bf-0db2-4b1e-a514-06b7803458d0 / got description=39ac9778-79c6-43cc-aa4b-1043b81a180e) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is working INFO  tests.suites.replication.single_master_test:single_master_test.py:83 Back up /var/lib/dirsrv/slapd-consumer1/db/userRoot/mail.db to /tmp/mail.db INFO  tests.suites.replication.single_master_test:single_master_test.py:87 Remove 'mail' attr from master INFO  tests.suites.replication.single_master_test:single_master_test.py:90 Wait for the replication to happen INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 4619168b-41ce-4a56-8247-6db6685a4d87 / got description=8fb723bf-0db2-4b1e-a514-06b7803458d0) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 4619168b-41ce-4a56-8247-6db6685a4d87 / got description=8fb723bf-0db2-4b1e-a514-06b7803458d0) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 4619168b-41ce-4a56-8247-6db6685a4d87 / got description=8fb723bf-0db2-4b1e-a514-06b7803458d0) INFO  lib389.replica:replica.py:2498 Retry: Replication from 
ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 4619168b-41ce-4a56-8247-6db6685a4d87 / got description=8fb723bf-0db2-4b1e-a514-06b7803458d0) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is working INFO  tests.suites.replication.single_master_test:single_master_test.py:94 Restore /tmp/mail.db to /var/lib/dirsrv/slapd-consumer1/db/userRoot/mail.db INFO  tests.suites.replication.single_master_test:single_master_test.py:98 Make a search for mail attribute in attempt to crash server INFO  tests.suites.replication.single_master_test:single_master_test.py:101 Make sure that server hasn't crashed INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 356c5aa3-bb85-4f55-b625-4a90b328af2f / got description=4619168b-41ce-4a56-8247-6db6685a4d87) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 356c5aa3-bb85-4f55-b625-4a90b328af2f / got description=4619168b-41ce-4a56-8247-6db6685a4d87) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 356c5aa3-bb85-4f55-b625-4a90b328af2f / got description=4619168b-41ce-4a56-8247-6db6685a4d87) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 
356c5aa3-bb85-4f55-b625-4a90b328af2f / got description=4619168b-41ce-4a56-8247-6db6685a4d87) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is working
Passed suites/replication/single_master_test.py::test_lastupdate_attr_before_init 0.16
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38902, 'ldap-secureport': 63602, 'server-id': 'standalone2', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:38901 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:38902 was created
Passed suites/replication/tls_client_auth_repl_test.py::test_ssl_transport 8.24
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c6b31c9d-b1ca-4252-834e-a7c9bf9254ca / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working 
(expect 2affb069-2aa2-40c4-a3e3-9e5717e7164e / got description=c6b31c9d-b1ca-4252-834e-a7c9bf9254ca) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists INFO  lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63702 is NOT working (expect aabcfb57-bc7f-4b79-a10f-386f097edeac / got description=2affb069-2aa2-40c4-a3e3-9e5717e7164e) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63702 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63702 is NOT working (expect e50a6a62-1359-4a50-a93f-cb08290635ff / got description=aabcfb57-bc7f-4b79-a10f-386f097edeac) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from 
ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63702 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701 is NOT working (expect f3192ad6-7d08-4734-afd7-fdfa67055f79 / got description=e50a6a62-1359-4a50-a93f-cb08290635ff) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701 is working
-------------------------------Captured log call--------------------------------
INFO  lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63702 is NOT working (expect b95512df-7627-420a-80ef-ecd568436f69 / got description=f3192ad6-7d08-4734-afd7-fdfa67055f79) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63702 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701 is NOT working (expect 71d02fbb-8575-4add-8f51-9eef0567fc02 / got description=b95512df-7627-420a-80ef-ecd568436f69) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63702 is NOT working (expect 3059d726-635b-4ed3-8186-b7caac6f5c32 / got description=71d02fbb-8575-4add-8f51-9eef0567fc02) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63702 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701 is NOT working (expect 46f637bc-37f9-44f0-acbc-81e4a6738f09 / got description=3059d726-635b-4ed3-8186-b7caac6f5c32) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63702 to 
ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63702 is NOT working (expect e04b3c77-2fa2-40da-84ba-df3a4e907fb5 / got description=46f637bc-37f9-44f0-acbc-81e4a6738f09) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63702 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701 is NOT working (expect 95223eb9-a32e-420a-a1e6-eadd972dcc9e / got description=e04b3c77-2fa2-40da-84ba-df3a4e907fb5) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63702 is NOT working (expect 1ac9501d-84cf-437d-813a-35e63fccd7c1 / got description=95223eb9-a32e-420a-a1e6-eadd972dcc9e) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63702 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701 is NOT working (expect abab4f84-1e8a-440b-9d2a-b1f6e1abaf4b / got description=1ac9501d-84cf-437d-813a-35e63fccd7c1) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from 
ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701 is working
Passed suites/replication/tls_client_auth_repl_test.py::test_extract_pemfiles 4.09
-------------------------------Captured log call--------------------------------
INFO  tests.suites.replication.tls_client_auth_repl_test:tls_client_auth_repl_test.py:168 Check that nsslapd-extract-pemfiles is on INFO  tests.suites.replication.tls_client_auth_repl_test:tls_client_auth_repl_test.py:172 Set nsslapd-extract-pemfiles = '{}' and check replication works) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63702 is NOT working (expect 40309688-36b0-4901-a734-5aae4dfb8e36 / got description=abab4f84-1e8a-440b-9d2a-b1f6e1abaf4b) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63702 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701 is NOT working (expect 246b326c-2ae1-4d5c-bdee-6b0fe0723623 / got description=40309688-36b0-4901-a734-5aae4dfb8e36) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701 is working INFO  tests.suites.replication.tls_client_auth_repl_test:tls_client_auth_repl_test.py:172 Set nsslapd-extract-pemfiles = '{}' and check replication works) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63702 is NOT working (expect 37b33fe2-b03e-48c3-83e6-8eadc4f4d85d / got description=246b326c-2ae1-4d5c-bdee-6b0fe0723623) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63702 is working INFO  
lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701 is NOT working (expect e035a09e-f84f-43c2-a38b-948dc2573140 / got description=37b33fe2-b03e-48c3-83e6-8eadc4f4d85d) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701 is working
Passed suites/replication/tombstone_fixup_test.py::test_precise_tombstone_purging 23.23
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology.
-------------------------------Captured log call--------------------------------
INFO  lib389:tasks.py:567 Export task export_10282020_232105 for file /var/lib/dirsrv/slapd-master1/ldif/export.ldif completed successfully INFO  lib389.utils:tombstone_fixup_test.py:77 Import replication LDIF file... INFO  lib389:tasks.py:498 Import task import_10282020_232109 for file /var/lib/dirsrv/slapd-master1/ldif/export.ldif completed successfully INFO  lib389:tasks.py:937 tombstone fixup task fixupTombstone_10282020_232111 for backend userRoot completed successfully INFO  lib389:tasks.py:937 tombstone fixup task fixupTombstone_10282020_232114 for backend userRoot completed successfully INFO  lib389.utils:tombstone_fixup_test.py:116 Wait for tombstone purge interval to pass... INFO  lib389.utils:tombstone_fixup_test.py:123 Wait for tombstone purge interval to pass again...
Passed suites/replication/tombstone_test.py::test_purge_success 0.10
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology.
-------------------------------Captured log call--------------------------------
INFO  Tombstone:tombstone.py:165 Reviving nsuniqueid=db992386-199511eb-ae38f6a0-f97954da,uid=testuser,ou=people,dc=example,dc=com -> uid=testuser,ou=people,dc=example,dc=com
Passed suites/replication/wait_for_async_feature_test.py::test_not_int_value 0.00
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect fbf146f6-8a13-4828-b5a1-ecaf3a96ab7d / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working 
(expect 224918eb-8179-42af-a90b-eff26ba73de1 / got description=fbf146f6-8a13-4828-b5a1-ecaf3a96ab7d) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists
Passed suites/replication/wait_for_async_feature_test.py::test_multi_value 0.02
No log output captured.
Passed suites/replication/wait_for_async_feature_test.py::test_value_check[waitfor_async_attr0] 0.00
No log output captured.
Passed suites/replication/wait_for_async_feature_test.py::test_value_check[waitfor_async_attr1] 0.00
No log output captured.
Passed suites/replication/wait_for_async_feature_test.py::test_value_check[waitfor_async_attr2] 0.00
No log output captured.
Passed suites/replication/wait_for_async_feature_test.py::test_value_check[waitfor_async_attr3] 0.00
No log output captured.
Passed suites/replication/wait_for_async_feature_test.py::test_behavior_with_value[waitfor_async_attr0] 20.03
-------------------------------Captured log setup-------------------------------
INFO  tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:63 Add 100 nested entries under replicated suffix on master1 INFO  tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:71 Delete created entries
-------------------------------Captured log call--------------------------------
INFO  tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:170 Set Replication Debugging loglevel for the errorlog INFO  tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:180 Gather all sync attempts within Counter dict, group by timestamp INFO  tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:201 Take the most common timestamp and assert it has appeared in the range from 4 to 11 times DEBUG  tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:204 4 <= 9 <= 11
Passed suites/replication/wait_for_async_feature_test.py::test_behavior_with_value[waitfor_async_attr1] 20.09
-------------------------------Captured log setup-------------------------------
INFO  tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:63 Add 100 nested entries under replicated suffix on master1 INFO  tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:71 Delete created entries
-------------------------------Captured log call--------------------------------
INFO  tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:170 Set Replication Debugging loglevel for the errorlog INFO  tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:180 Gather all sync attempts within Counter dict, group by timestamp INFO  tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:201 Take the most common timestamp and assert it has appeared in the range from 0 to 2 times DEBUG  tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:204 0 <= 1 <= 2
Passed suites/replication/wait_for_async_feature_test.py::test_behavior_with_value[waitfor_async_attr2] 20.33
-------------------------------Captured log setup-------------------------------
INFO  tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:63 Add 100 nested entries under replicated suffix on master1 INFO  tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:71 Delete created entries
-------------------------------Captured log call--------------------------------
INFO  tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:170 Set Replication Debugging loglevel for the errorlog INFO  tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:180 Gather all sync attempts within Counter dict, group by timestamp INFO  tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:201 Take the most common timestamp and assert it has appeared in the range from 4 to 11 times DEBUG  tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:204 4 <= 5 <= 11
Passed suites/replication/wait_for_async_feature_test.py::test_behavior_with_value[waitfor_async_attr3] 20.08
-------------------------------Captured log setup-------------------------------
INFO  tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:63 Add 100 nested entries under replicated suffix on master1 INFO  tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:71 Delete created entries
-------------------------------Captured log call--------------------------------
INFO  tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:170 Set Replication Debugging loglevel for the errorlog INFO  tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:180 Gather all sync attempts within Counter dict, group by timestamp INFO  tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:201 Take the most common timestamp and assert it has appeared in the range from 4 to 11 times DEBUG  tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:204 4 <= 7 <= 11
Passed suites/resource_limits/fdlimits_test.py::test_fd_limits 0.53
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.suites.resource_limits.fdlimits_test:fdlimits_test.py:69 Test PASSED
Passed suites/rewriters/adfilter_test.py::test_adfilter_objectCategory 5.50
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/rewriters/basic_test.py::test_rewriters_container 0.00
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/rewriters/basic_test.py::test_foo_filter_rewriter 4.37
No log output captured.
Passed suites/roles/basic_test.py::test_filterrole 0.73
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/roles/basic_test.py::test_managedrole 0.18
No log output captured.
Passed suites/roles/basic_test.py::test_nestedrole 0.27
No log output captured.
Passed suites/sasl/allowed_mechs_test.py::test_basic_feature 36.94
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
------------------------------Captured stdout call------------------------------
['EXTERNAL', 'GSS-SPNEGO', 'GSSAPI', 'DIGEST-MD5', 'CRAM-MD5', 'LOGIN', 'PLAIN', 'ANONYMOUS']
-------------------------------Captured log call--------------------------------
INFO  lib389:allowed_mechs_test.py:75 Test we have some of the default mechanisms INFO  lib389:allowed_mechs_test.py:83 Edit mechanisms to allow just PLAIN INFO  lib389:allowed_mechs_test.py:91 Restart server and make sure we still have correct allowed mechs INFO  lib389:allowed_mechs_test.py:100 Edit mechanisms to allow just PLAIN and EXTERNAL INFO  lib389:allowed_mechs_test.py:108 Edit mechanisms to allow just PLAIN and GSSAPI INFO  lib389:allowed_mechs_test.py:126 Edit mechanisms to allow just PLAIN, GSSAPI, and ANONYMOUS INFO  lib389:allowed_mechs_test.py:146 Edit mechanisms to allow just PLAIN and ANONYMOUS INFO  lib389:allowed_mechs_test.py:165 Reset allowed mechaisms INFO  lib389:allowed_mechs_test.py:169 Check that we have the original set of mechanisms INFO  lib389:allowed_mechs_test.py:174 Check that we have the original set of mechanisms after a restart
Passed suites/sasl/allowed_mechs_test.py::test_config_set_few_mechs 0.14
-------------------------------Captured log call--------------------------------
INFO  lib389:allowed_mechs_test.py:198 Set nsslapd-allowed-sasl-mechanisms to 'PLAIN GSSAPI' INFO  lib389:allowed_mechs_test.py:201 Verify nsslapd-allowed-sasl-mechanisms has the values
Passed suites/sasl/plain_test.py::test_basic_feature 10.91
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/sasl/regression_test.py::test_openldap_no_nss_crypto 34.01
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 7be38be9-8859-422d-94dc-bb2f88097ef2 / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working 
(expect 0170ee8e-d1ac-4a79-9d54-ebfa2b2e3c11 / got description=7be38be9-8859-422d-94dc-bb2f88097ef2) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists
-------------------------------Captured log call--------------------------------
INFO  tests.suites.sasl.regression_test:regression_test.py:133 Ticket 47536 - Allow usage of OpenLDAP libraries that don't use NSS for crypto INFO  lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63702 is NOT working (expect 993671e5-57f9-41f4-81fd-d4c6721491df / got description=0170ee8e-d1ac-4a79-9d54-ebfa2b2e3c11) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63702 is working INFO  tests.suites.sasl.regression_test:regression_test.py:35 ######################### Adding 5 entries to master1 ###################### INFO  tests.suites.sasl.regression_test:regression_test.py:35 ######################### Adding 5 entries to master2 ###################### INFO  lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63702 is NOT working (expect fcf9e21e-df26-429b-a626-01ff28895dcd / got description=993671e5-57f9-41f4-81fd-d4c6721491df) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63702 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701 is NOT working (expect 6a5db222-ff0f-4a71-9525-49b891bbea9c / got description=fcf9e21e-df26-429b-a626-01ff28895dcd) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701 is working INFO  
tests.suites.sasl.regression_test:regression_test.py:146 ##### Searching for entries on master1... INFO  tests.suites.sasl.regression_test:regression_test.py:150 ##### Searching for entries on master2... INFO  tests.suites.sasl.regression_test:regression_test.py:92 ######################### Relocate PEM files on master1 ###################### INFO  tests.suites.sasl.regression_test:regression_test.py:100 ##### restart master1 INFO  tests.suites.sasl.regression_test:regression_test.py:47 ######################### Check PEM files (/dev/shm/MyCA, /dev/shm/MyServerCert1, /dev/shm/MyServerKey1) in /dev/shm ###################### INFO  tests.suites.sasl.regression_test:regression_test.py:53 /dev/shm/MyCA.pem is successfully generated. INFO  tests.suites.sasl.regression_test:regression_test.py:66 /dev/shm/MyServerCert1.pem is successfully generated. INFO  tests.suites.sasl.regression_test:regression_test.py:79 /dev/shm/MyServerKey1.pem is successfully generated. INFO  tests.suites.sasl.regression_test:regression_test.py:35 ######################### Adding 5 entries to master1 ###################### INFO  tests.suites.sasl.regression_test:regression_test.py:35 ######################### Adding 5 entries to master2 ###################### INFO  lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63702 is NOT working (expect d4d99a2e-6306-4050-a9d5-a633f7dcef04 / got description=6a5db222-ff0f-4a71-9525-49b891bbea9c) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63702 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701 is NOT working (expect 
1af814f9-85d6-4128-b0b3-53e4875ff283 / got description=d4d99a2e-6306-4050-a9d5-a633f7dcef04) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701 is NOT working (expect 1af814f9-85d6-4128-b0b3-53e4875ff283 / got description=d4d99a2e-6306-4050-a9d5-a633f7dcef04) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701 is working INFO  tests.suites.sasl.regression_test:regression_test.py:162 ##### Searching for entries on master1... INFO  tests.suites.sasl.regression_test:regression_test.py:166 ##### Searching for entries on master2... INFO  lib389:tasks.py:567 Export task export_10282020_232804 for file /var/lib/dirsrv/slapd-master1/ldif/master1.ldif completed successfully INFO  tests.suites.sasl.regression_test:regression_test.py:173 Ticket 47536 - PASSED
Passed suites/schema/eduperson_test.py::test_account_locking 0.93
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.suites.schema.eduperson_test:eduperson_test.py:88 Test PASSED
Passed suites/schema/schema_reload_test.py::test_schema_reload_with_searches 1.29
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.suites.schema.schema_reload_test:schema_reload_test.py:49 Test the searches still work as expected during schema reload tasks
Passed suites/schema/schema_reload_test.py::test_invalid_schema 2.14
-------------------------------Captured log call--------------------------------
INFO  tests.suites.schema.schema_reload_test:schema_reload_test.py:234 Test schema-reload task with invalid schema INFO  tests.suites.schema.schema_reload_test:schema_reload_test.py:237 Create valid schema file (98user.ldif)... INFO  tests.suites.schema.schema_reload_test:schema_reload_test.py:252 Create invalid schema file (99user.ldif)... INFO  tests.suites.schema.schema_reload_test:schema_reload_test.py:269 Run the schema-reload task, it should fail... INFO  tests.suites.schema.schema_reload_test:schema_reload_test.py:276 Check cn=schema to verify the invalid schema was not added INFO  tests.suites.schema.schema_reload_test:schema_reload_test.py:283 The invalid schema is not present on the server
Passed suites/schema/schema_replication_test.py::test_schema_replication_one 9.43
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for consumer1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39201, 'ldap-secureport': 63901, 'server-id': 'consumer1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:169 Joining consumer consumer1 from master1 ... INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 05994c4e-5483-48d8-add3-d917bd51ed21 / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is working INFO  lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 INFO  lib389.topologies:topologies.py:174 Ensuring consumer consumer1 from master1 ... 
INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 already exists DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:182 test_schema_replication_init topology_m1c1 <lib389.topologies.TopologyMain object at 0x7fd167f24820> (master <lib389.DirSrv object at 0x7fd167f39490>, consumer <lib389.DirSrv object at 0x7fd167591bb0>
-------------------------------Captured log call--------------------------------
INFO  lib389:schema_replication_test.py:41 ############################################### INFO  lib389:schema_replication_test.py:42 ####### INFO  lib389:schema_replication_test.py:43 ####### Extra OC Schema is pushed - no error INFO  lib389:schema_replication_test.py:44 ####### INFO  lib389:schema_replication_test.py:45 ################################################### DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:222 test_schema_replication_one topology_m1c1 <lib389.topologies.TopologyMain object at 0x7fd167f24820> (master <lib389.DirSrv object at 0x7fd167f39490>, consumer <lib389.DirSrv object at 0x7fd167591bb0> DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive 0 (expected 1) INFO  lib389:agreement.py:1194 Pausing replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive b'1' (expected 2) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:233 test_schema_replication_one master_schema_csn=b'5f9a3700000000000000' DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:234 ctest_schema_replication_one onsumer_schema_csn=b'5f9a3700000000000000' DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:55 _pattern_errorlog: start at offset 0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [38] 389-Directory/1.4.5.0 B2020.303.0011 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [95] localhost.localdomain:39001 (/etc/dirsrv/slapd-master1) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [96] DEBUG  
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [200] [28/Oct/2020:23:28:41.229008378 -0400] - INFO - main - 389-Directory/1.4.5.0 B2020.303.0011 starting up DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [308] [28/Oct/2020:23:28:41.231568975 -0400] - INFO - main - Setting the maximum file descriptor limit to: 524288 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [416] [28/Oct/2020:23:28:42.012286339 -0400] - INFO - PBKDF2_SHA256 - Based on CPU performance, chose 2048 rounds DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [530] [28/Oct/2020:23:28:42.020371974 -0400] - INFO - bdb_config_upgrade_dse_info - create config entry from old config DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [632] [28/Oct/2020:23:28:42.028629343 -0400] - NOTICE - bdb_start_autotune - found 7980868k physical memory DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [728] [28/Oct/2020:23:28:42.032499684 -0400] - NOTICE - bdb_start_autotune - found 7313980k available DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [835] [28/Oct/2020:23:28:42.035177131 -0400] - NOTICE - bdb_start_autotune - cache autosizing: db cache: 498804k DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [938] [28/Oct/2020:23:28:42.037898785 -0400] - NOTICE - bdb_start_autotune - total cache size: 408620441 B; DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [1074] [28/Oct/2020:23:28:42.279020288 -0400] - INFO - slapd_daemon - slapd started. 
Listening on All Interfaces port 39001 for LDAP requests DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [1199] [28/Oct/2020:23:28:42.283852576 -0400] - INFO - slapd_daemon - Listening on /var/run/slapd-master1.socket for LDAPI requests DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [1358] [28/Oct/2020:23:28:42.557432236 -0400] - INFO - postop_modify_config_dse - The change of nsslapd-securePort will not take effect until the server is restarted DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [1475] [28/Oct/2020:23:28:42.566779798 -0400] - INFO - ldbm_instance_config_cachememsize_set - force a minimal value 512000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [1653] [28/Oct/2020:23:28:43.730997831 -0400] - INFO - op_thread_cleanup - slapd shutting down - signaling operation threads - op stack size 2 max work q size 1 max work q stack size 1 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [1783] [28/Oct/2020:23:28:43.737762282 -0400] - INFO - slapd_daemon - slapd shutting down - closing down internal subsystems and plugins DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [1886] [28/Oct/2020:23:28:43.744674347 -0400] - INFO - bdb_pre_close - Waiting for 4 database threads to stop DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [1983] [28/Oct/2020:23:28:44.718539262 -0400] - INFO - bdb_pre_close - All database threads now stopped DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [2094] [28/Oct/2020:23:28:44.742182688 -0400] - INFO - ldbm_back_instance_set_destructor - Set of instances destroyed DEBUG  
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [2255] [28/Oct/2020:23:28:44.746246534 -0400] - INFO - connection_post_shutdown_cleanup - slapd shutting down - freed 1 work q stack objects - freed 2 op stack objects DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [2325] [28/Oct/2020:23:28:44.753125571 -0400] - INFO - main - slapd stopped. DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [2429] [28/Oct/2020:23:28:46.119288781 -0400] - INFO - main - 389-Directory/1.4.5.0 B2020.303.0011 starting up DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [2537] [28/Oct/2020:23:28:46.122881786 -0400] - INFO - main - Setting the maximum file descriptor limit to: 524288 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [2645] [28/Oct/2020:23:28:46.894779962 -0400] - INFO - PBKDF2_SHA256 - Based on CPU performance, chose 2048 rounds DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [2762] [28/Oct/2020:23:28:46.900962091 -0400] - INFO - ldbm_instance_config_cachememsize_set - force a minimal value 512000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [2864] [28/Oct/2020:23:28:46.907276136 -0400] - NOTICE - bdb_start_autotune - found 7980868k physical memory DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [2960] [28/Oct/2020:23:28:46.910301443 -0400] - NOTICE - bdb_start_autotune - found 7314128k available DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [3067] [28/Oct/2020:23:28:46.914934295 -0400] - NOTICE - bdb_start_autotune - cache autosizing: db cache: 498804k DEBUG  
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [3197] [28/Oct/2020:23:28:46.917552801 -0400] - NOTICE - bdb_start_autotune - cache autosizing: userRoot entry cache (1 total): 1376256k DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [3323] [28/Oct/2020:23:28:46.926584681 -0400] - NOTICE - bdb_start_autotune - cache autosizing: userRoot dn cache (1 total): 196608k DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [3427] [28/Oct/2020:23:28:46.930175021 -0400] - NOTICE - bdb_start_autotune - total cache size: 1834683801 B; DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [3563] [28/Oct/2020:23:28:47.029986039 -0400] - INFO - slapd_daemon - slapd started. Listening on All Interfaces port 39001 for LDAP requests DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [3688] [28/Oct/2020:23:28:47.033980461 -0400] - INFO - slapd_daemon - Listening on /var/run/slapd-master1.socket for LDAPI requests DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [3876] [28/Oct/2020:23:28:56.369300659 -0400] - NOTICE - NSMMReplicationPlugin - changelog program - _cl5ConstructRUV - Rebuilding the replication changelog RUV, this may take several minutes... DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [4056] [28/Oct/2020:23:28:56.372450566 -0400] - NOTICE - NSMMReplicationPlugin - changelog program - _cl5ConstructRUV - Rebuilding replication changelog RUV complete. 
Result 0 (Success) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [4244] [28/Oct/2020:23:28:56.375031507 -0400] - NOTICE - NSMMReplicationPlugin - changelog program - _cl5ConstructRUV - Rebuilding the replication changelog RUV, this may take several minutes... DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [4424] [28/Oct/2020:23:28:56.378081526 -0400] - NOTICE - NSMMReplicationPlugin - changelog program - _cl5ConstructRUV - Rebuilding replication changelog RUV complete. Result 0 (Success) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [4595] [28/Oct/2020:23:28:57.784820043 -0400] - INFO - NSMMReplicationPlugin - repl5_tot_run - Beginning total update of replica "agmt="cn=temp_201" (ci-vm-10-0-136-251:39201)". DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [4779] [28/Oct/2020:23:28:57.791562898 -0400] - NOTICE - NSMMReplicationPlugin - replica_subentry_check - Need to create replication keep alive entry <cn=repl keep alive 1,dc=example,dc=com> DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [4924] [28/Oct/2020:23:28:57.795464607 -0400] - INFO - NSMMReplicationPlugin - replica_subentry_create - add dn: cn=repl keep alive 1,dc=example,dc=com DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [4941] objectclass: top DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [4967] objectclass: ldapsubentry DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [4997] objectclass: extensibleObject DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [5019] cn: repl keep alive 1 DEBUG  
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [5206] [28/Oct/2020:23:29:00.370764002 -0400] - INFO - NSMMReplicationPlugin - repl5_tot_run - Finished total update of replica "agmt="cn=temp_201" (ci-vm-10-0-136-251:39201)". Sent 16 entries. DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [5206] DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:67 _pattern_errorlog: end at offset 5206
Passed suites/schema/schema_replication_test.py::test_schema_replication_two 11.50
-------------------------------Captured log call--------------------------------
INFO  lib389:schema_replication_test.py:41 ############################################### INFO  lib389:schema_replication_test.py:42 ####### INFO  lib389:schema_replication_test.py:43 ####### Extra OC Schema is pushed - (ticket 47721 allows to learn missing def) INFO  lib389:schema_replication_test.py:44 ####### INFO  lib389:schema_replication_test.py:45 ################################################### DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive b'2' (expected 3) INFO  lib389:agreement.py:1194 Pausing replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive b'3' (expected 4) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:289 test_schema_replication_two master_schema_csn=b'5f9a370c000000000000' DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:290 test_schema_replication_two consumer_schema_csn=b'5f9a370c000000000000' DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:55 _pattern_errorlog: start at offset 5207 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [5420] 28/Oct/2020:23:29:16.743132607 -0400] - ERR - NSMMReplicationPlugin - update_consumer_schema - [S] Schema agmt="cn=201" (ci-vm-10-0-136-251:39201) must not be overwritten (set replication log for additional info) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:67 _pattern_errorlog: end at offset 5420
Passed suites/schema/schema_replication_test.py::test_schema_replication_three 9.40
-------------------------------Captured log call--------------------------------
INFO  lib389:schema_replication_test.py:41 ############################################### INFO  lib389:schema_replication_test.py:42 ####### INFO  lib389:schema_replication_test.py:43 ####### Extra OC Schema is pushed - no error INFO  lib389:schema_replication_test.py:44 ####### INFO  lib389:schema_replication_test.py:45 ################################################### DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive b'4' (expected 5) INFO  lib389:agreement.py:1194 Pausing replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive b'5' (expected 6) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:340 test_schema_replication_three master_schema_csn=b'5f9a3716000000000000' DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:341 test_schema_replication_three consumer_schema_csn=b'5f9a3716000000000000' DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:55 _pattern_errorlog: start at offset 5421 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [5421] DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:67 _pattern_errorlog: end at offset 5421
Passed suites/schema/schema_replication_test.py::test_schema_replication_four 9.40
-------------------------------Captured log call--------------------------------
INFO  lib389:schema_replication_test.py:41 ############################################### INFO  lib389:schema_replication_test.py:42 ####### INFO  lib389:schema_replication_test.py:43 ####### Same OC - extra MUST: Schema is pushed - no error INFO  lib389:schema_replication_test.py:44 ####### INFO  lib389:schema_replication_test.py:45 ################################################### DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive b'6' (expected 7) INFO  lib389:agreement.py:1194 Pausing replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive b'7' (expected 8) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:389 test_schema_replication_four master_schema_csn=b'5f9a371f000000000000' DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:390 ctest_schema_replication_four onsumer_schema_csn=b'5f9a371f000000000000' DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:55 _pattern_errorlog: start at offset 5422 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [5422] DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:67 _pattern_errorlog: end at offset 5422
Passed suites/schema/schema_replication_test.py::test_schema_replication_five 11.75
-------------------------------Captured log call--------------------------------
INFO  lib389:schema_replication_test.py:41 ############################################### INFO  lib389:schema_replication_test.py:42 ####### INFO  lib389:schema_replication_test.py:43 ####### Same OC - extra MUST: Schema is pushed - (fix for 47721) INFO  lib389:schema_replication_test.py:44 ####### INFO  lib389:schema_replication_test.py:45 ################################################### DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive b'8' (expected 9) INFO  lib389:agreement.py:1194 Pausing replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive b'9' (expected 10) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:452 test_schema_replication_five master_schema_csn=b'5f9a372b000000000000' DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:453 ctest_schema_replication_five onsumer_schema_csn=b'5f9a372b000000000000' DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:55 _pattern_errorlog: start at offset 5423 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [5528] /Oct/2020:23:29:47.324728563 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. 
DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [5650] [28/Oct/2020:23:29:47.338068885 -0400] - DEBUG - _csngen_adjust_local_time - gen state before 5f9a37270002:1603942183:0:0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [5771] [28/Oct/2020:23:29:47.341921133 -0400] - DEBUG - _csngen_adjust_local_time - gen state after 5f9a372b0000:1603942187:0:0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [5934] [28/Oct/2020:23:29:47.345461474 -0400] - DEBUG - NSMMReplicationPlugin - ruv_add_csn_inprogress - Successfully inserted csn 5f9a372b000000010000 into pending list DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [6114] [28/Oct/2020:23:29:47.348686882 -0400] - DEBUG - NSMMReplicationPlugin - purge_entry_state_information - From entry cn=test_entry,dc=example,dc=com up to CSN 5f90fca7000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [6359] [28/Oct/2020:23:29:47.352768703 -0400] - DEBUG - NSMMReplicationPlugin - write_changelog_and_ruv - Writing change for cn=test_entry,dc=example,dc=com (uniqid: e2746602-199611eb-bbbd91ac-3101a0f1, optype: 8) to changelog csn 5f9a372b000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [6534] [28/Oct/2020:23:29:47.356591814 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - cl5WriteOperationTxn - Successfully written entry with csn (5f9a372b000000010000) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [6672] [28/Oct/2020:23:29:47.359840524 -0400] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: committing all csns for csn 5f9a372b000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [6802] 
[28/Oct/2020:23:29:47.368005685 -0400] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: processing data csn 5f9a372b000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [6940] [28/Oct/2020:23:29:47.371841318 -0400] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Successfully committed csn 5f9a372b000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [7068] [28/Oct/2020:23:29:47.374986951 -0400] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Rolled up to csn 5f9a372b000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [7183] [28/Oct/2020:23:29:47.378032969 -0400] - DEBUG - replication - multimaster_mmr_postop - error 0 for operation 561. DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [7358] [28/Oct/2020:23:29:47.383277972 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: wait_for_changes -> wait_for_changes DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [7541] [28/Oct/2020:23:29:47.386618723 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: wait_for_changes -> ready_to_acquire_replica DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [7713] [28/Oct/2020:23:29:47.390962427 -0400] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-136-251:39201) - Canceling linger on the connection DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [7881] [28/Oct/2020:23:29:47.397919133 -0400] - DEBUG - NSMMReplicationPlugin - acquire_replica - agmt="cn=201" (ci-vm-10-0-136-251:39201): Replica was successfully acquired. 
DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [8063] [28/Oct/2020:23:29:47.401079431 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: ready_to_acquire_replica -> sending_updates DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [8247] [28/Oct/2020:23:29:47.403857031 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Checking consumer schema localcsn:5f9a372b000000000000 / remotecsn:5f9a371f000000000000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [8411] [28/Oct/2020:23:29:47.533617827 -0400] - DEBUG - schema_oc_compare_strict - Attribute telexNumber is not required in 'consumerNewOCA' of the remote consumer schema DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [8565] [28/Oct/2020:23:29:47.536826435 -0400] - DEBUG - schema_oc_superset_check - Remote consumerNewOCA schema objectclasses is a superset of the received one. 
DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [8729] [28/Oct/2020:23:29:47.541482784 -0400] - DEBUG - schema_oc_compare_strict - Attribute telexNumber is not required in 'consumerNewOCA' of the remote consumer schema DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [8879] [28/Oct/2020:23:29:47.544658846 -0400] - DEBUG - schema_list_oc2learn - Add that unknown/extended objectclass consumerNewOCA (1.2.3.4.5.6.7.8.9.10.1) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [9165] [28/Oct/2020:23:29:47.548320218 -0400] - DEBUG - schema_oc_to_string - Replace (old[251]=( 1.2.3.4.5.6.7.8.9.10.1 NAME 'consumerNewOCA' DESC 'To test ticket 47490' SUP 'person' AUXILIARY MUST ( postalAddress $ preferredLocale ) MAY ( postalCode $ street ) X-ORIGIN 'blahblahblah' )) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [9478] [28/Oct/2020:23:29:47.551263041 -0400] - DEBUG - supplier_get_new_definitions - supplier takes objectclass: ( 1.2.3.4.5.6.7.8.9.10.1 NAME 'consumerNewOCA' DESC 'To test ticket 47490' SUP person AUXILIARY MUST ( postalAddress $ preferredLocale $ telexNumber ) MAY ( postalCode $ street ) X-ORIGIN 'user defined' ) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [9780] [28/Oct/2020:23:29:47.565777494 -0400] - DEBUG - modify_schema_prepare_mods - MOD[1] del (objectclasses): ( 1.2.3.4.5.6.7.8.9.10.1 NAME 'consumerNewOCA' DESC 'To test ticket 47490' SUP 'person' AUXILIARY MUST ( postalAddress $ preferredLocale ) MAY ( postalCode $ street ) X-ORIGIN 'blahblahblah' ) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [10091] [28/Oct/2020:23:29:47.570384983 -0400] - DEBUG - modify_schema_prepare_mods - MOD[0] add (objectclasses): ( 1.2.3.4.5.6.7.8.9.10.1 NAME 
'consumerNewOCA' DESC 'To test ticket 47490' SUP person AUXILIARY MUST ( postalAddress $ preferredLocale $ telexNumber ) MAY ( postalCode $ street ) X-ORIGIN 'user defined' ) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [10199] [28/Oct/2020:23:29:47.575194121 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [10322] [28/Oct/2020:23:29:47.579830390 -0400] - DEBUG - modify_schema_internal_mod - Successfully learn objectclasses definitions DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [10536] [28/Oct/2020:23:29:47.585135577 -0400] - ERR - NSMMReplicationPlugin - update_consumer_schema - [S] Schema agmt="cn=201" (ci-vm-10-0-136-251:39201) must not be overwritten (set replication log for additional info) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:67 _pattern_errorlog: end at offset 10536
Passed suites/schema/schema_replication_test.py::test_schema_replication_six 9.63
-------------------------------Captured log call--------------------------------
INFO  lib389:schema_replication_test.py:41 ############################################### INFO  lib389:schema_replication_test.py:42 ####### INFO  lib389:schema_replication_test.py:43 ####### Same OC - extra MUST: Schema is pushed - no error INFO  lib389:schema_replication_test.py:44 ####### INFO  lib389:schema_replication_test.py:45 ################################################### DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive b'10' (expected 11) INFO  lib389:agreement.py:1194 Pausing replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive b'11' (expected 12) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:508 test_schema_replication_six master_schema_csn=b'5f9a3735000000000000' DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:509 ctest_schema_replication_six onsumer_schema_csn=b'5f9a3735000000000000' DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:55 _pattern_errorlog: start at offset 10537 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [10673] 28/Oct/2020:23:29:47.588947680 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] schema definitions may have been learned DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [10807] [28/Oct/2020:23:29:47.790766593 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Reread remotecsn:5f9a3729000000000000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [10992] [28/Oct/2020:23:29:47.794270503 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - 
Schema checking successful: ok to push the schema (agmt="cn=201" (ci-vm-10-0-136-251:39201)) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [11107] [28/Oct/2020:23:29:47.990639771 -0400] - DEBUG - csngen_adjust_time - gen state before 5f9a372b0002:1603942187:0:0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [11288] [28/Oct/2020:23:29:47.996306848 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-136-251:39201)): Consumer RUV: DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [11444] [28/Oct/2020:23:29:47.999153002 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replicageneration} 5f9a36f8000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [11657] [28/Oct/2020:23:29:48.001916425 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replica 1 ldap://localhost.localdomain:39001} 5f9a36f8000100010000 5f9a3727000000010000 00000000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [11838] [28/Oct/2020:23:29:48.004728192 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-136-251:39201)): Supplier RUV: DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [11994] [28/Oct/2020:23:29:48.007255608 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replicageneration} 5f9a36f8000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [12207] [28/Oct/2020:23:29:48.009614005 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replica 1 
ldap://localhost.localdomain:39001} 5f9a36f8000100010000 5f9a372b000000010000 5f9a372b DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [12369] [28/Oct/2020:23:29:48.011920893 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_get_buffer - found thread private buffer cache 0x7ff560e7a500 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [12596] [28/Oct/2020:23:29:48.015184310 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_get_buffer - _pool is 0x7ff58e23f2e0 _pool->pl_busy_lists is 0x7ff560f05ae0 _pool->pl_busy_lists->bl_buffers is 0x7ff560e7a500 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [12892] [28/Oct/2020:23:29:48.017892952 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_initial_anchorcsn - agmt="cn=201" (ci-vm-10-0-136-251:39201) - (cscb 0 - state 0) - csnPrevMax () csnMax (5f9a372b000000010000) csnBuf (5f9a3727000000010000) csnConsumerMax (5f9a3727000000010000) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [13005] [28/Oct/2020:23:29:48.020171638 -0400] - DEBUG - clcache_initial_anchorcsn - anchor is now: 5f9a3727000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [13196] [28/Oct/2020:23:29:48.022675476 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - agmt="cn=201" (ci-vm-10-0-136-251:39201): CSN 5f9a3727000000010000 found, position set for replay DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [13352] [28/Oct/2020:23:29:48.025254695 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_get_next_change - load=1 rec=1 csn=5f9a372b000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [13464] 
[28/Oct/2020:23:29:48.027849322 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Starting DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [13596] [28/Oct/2020:23:29:48.030119691 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [13816] [28/Oct/2020:23:29:48.033141864 -0400] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-136-251:39201): Sending modify operation (dn="cn=test_entry,dc=example,dc=com" csn=5f9a372b000000010000) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [13948] [28/Oct/2020:23:29:48.035820529 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [14146] [28/Oct/2020:23:29:48.047080421 -0400] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-136-251:39201): Consumer successfully sent operation with csn 5f9a372b000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [14279] [28/Oct/2020:23:29:48.050048624 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 19 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [14409] [28/Oct/2020:23:29:48.052827302 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Result 3, 0, 0, 19, (null) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [14542] [28/Oct/2020:23:29:48.055790433 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 19 DEBUG  
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [14857] [28/Oct/2020:23:29:48.058250597 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_adjust_anchorcsn - agmt="cn=201" (ci-vm-10-0-136-251:39201) - (cscb 0 - state 1) - csnPrevMax (5f9a372b000000010000) csnMax (5f9a372b000000010000) csnBuf (5f9a372b000000010000) csnConsumerMax (5f9a372b000000010000) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [14981] [28/Oct/2020:23:29:48.062128355 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_load_buffer - rc=-30988 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [15165] [28/Oct/2020:23:29:48.064727386 -0400] - DEBUG - NSMMReplicationPlugin - send_updates - agmt="cn=201" (ci-vm-10-0-136-251:39201): No more updates to send (cl5GetNextOperationToReplay) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [15278] [28/Oct/2020:23:29:48.067446430 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 19 19 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [15411] [28/Oct/2020:23:29:48.070111433 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 19 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [15520] [28/Oct/2020:23:29:48.077118830 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain exiting DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [15792] [28/Oct/2020:23:29:48.080283325 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_return_buffer - session end: state=5 load=1 sent=1 skipped=0 skipped_new_rid=0 skipped_csn_gt_cons_maxcsn=0 skipped_up_to_date=0 skipped_csn_gt_ruv=0 skipped_csn_covered=0 
DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [15956] [28/Oct/2020:23:29:48.086385155 -0400] - DEBUG - NSMMReplicationPlugin - release_replica - agmt="cn=201" (ci-vm-10-0-136-251:39201): Successfully released consumer DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [16126] [28/Oct/2020:23:29:48.089193531 -0400] - DEBUG - NSMMReplicationPlugin - conn_start_linger -agmt="cn=201" (ci-vm-10-0-136-251:39201) - Beginning linger on the connection DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [16300] [28/Oct/2020:23:29:48.092006682 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: sending_updates -> wait_for_changes DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [16408] [28/Oct/2020:23:29:48.390833204 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. 
DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [16579] [28/Oct/2020:23:29:48.395268536 -0400] - DEBUG - NSMMReplicationPlugin - agmt_set_enabled_from_entry: agreement is now disabled (agmt="cn=201" (ci-vm-10-0-136-251:39201)) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [16754] [28/Oct/2020:23:29:48.398186346 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: wait_for_changes -> wait_for_changes DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [16929] [28/Oct/2020:23:29:48.401271768 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: wait_for_changes -> wait_for_changes DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [17101] [28/Oct/2020:23:29:48.404602505 -0400] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-136-251:39201) - Canceling linger on the connection DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [17276] [28/Oct/2020:23:29:48.407771772 -0400] - DEBUG - NSMMReplicationPlugin - close_connection_internal - agmt="cn=201" (ci-vm-10-0-136-251:39201) - Disconnected from the consumer DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [17441] [28/Oct/2020:23:29:48.498360917 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_stop - agmt="cn=201" (ci-vm-10-0-136-251:39201): Protocol stopped after 0 seconds DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [17569] [28/Oct/2020:23:29:48.502228076 -0400] - DEBUG - NSMMReplicationPlugin - Database RUV: {replicageneration} 5f9a36f8000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 
_pattern_errorlog: [17754] [28/Oct/2020:23:29:48.505663433 -0400] - DEBUG - NSMMReplicationPlugin - Database RUV: {replica 1 ldap://localhost.localdomain:39001} 5f9a36f8000100010000 5f9a372b000000010000 5f9a372b DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [17929] [28/Oct/2020:23:29:48.508967527 -0400] - DEBUG - NSMMReplicationPlugin - close_connection_internal - agmt="cn=201" (ci-vm-10-0-136-251:39201) - Disconnected from the consumer DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [18037] [28/Oct/2020:23:29:48.512599835 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [18145] [28/Oct/2020:23:29:53.539666890 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [18315] [28/Oct/2020:23:29:53.547096066 -0400] - DEBUG - NSMMReplicationPlugin - agmt_set_enabled_from_entry: agreement is now enabled (agmt="cn=201" (ci-vm-10-0-136-251:39201)) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [18490] [28/Oct/2020:23:29:53.551162078 -0400] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-136-251:39201) - No linger to cancel on the connection DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [18665] [28/Oct/2020:23:29:53.554631247 -0400] - DEBUG - NSMMReplicationPlugin - close_connection_internal - agmt="cn=201" (ci-vm-10-0-136-251:39201) - Disconnected from the consumer DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [18837] [28/Oct/2020:23:29:53.562004590 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" 
(ci-vm-10-0-136-251:39201): State: start -> ready_to_acquire_replica DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [19006] [28/Oct/2020:23:29:53.566099269 -0400] - DEBUG - NSMMReplicationPlugin - conn_connect - agmt="cn=201" (ci-vm-10-0-136-251:39201) - Trying non-secure slapi_ldap_init_ext DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [19534] [28/Oct/2020:23:29:53.569102938 -0400] - DEBUG - NSMMReplicationPlugin - conn_connect - agmt="cn=201" (ci-vm-10-0-136-251:39201) - binddn = cn=ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701,ou=services,dc=example,dc=com, passwd = {AES-TUhNR0NTcUdTSWIzRFFFRkRUQm1NRVVHQ1NxR1NJYjNEUUVGRERBNEJDUmtaVFV5WlRsbVl5MWtORGhrTTJJdw0KTlMxaFl6VTNNamcwT1Mxak1XVm1aVEEyTlFBQ0FRSUNBU0F3Q2dZSUtvWklodmNOQWdjd0hRWUpZSVpJQVdVRA0KQkFFcUJCRDhpTkdqOTI2RVV0eS9mSGtIMW1INw==}1aP0CGo7VTyXKYAH9mQPJxPbmZao4qtbDnejHuqj6hueREUbjTmpIFJ9v+wKLJM0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [19579] hwtXYLylnARfBAm0SlP5kfwhZ0UpujEuyyXSDnqmiaE= DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [19754] [28/Oct/2020:23:29:53.573336824 -0400] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-136-251:39201) - No linger to cancel on the connection DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [19876] [28/Oct/2020:23:29:53.578559282 -0400] - DEBUG - _csngen_adjust_local_time - gen state before 5f9a372b0002:1603942187:0:0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [19997] [28/Oct/2020:23:29:53.581185821 -0400] - DEBUG - _csngen_adjust_local_time - gen state after 5f9a37310000:1603942193:0:0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [20165] 
[28/Oct/2020:23:29:53.585696523 -0400] - DEBUG - NSMMReplicationPlugin - acquire_replica - agmt="cn=201" (ci-vm-10-0-136-251:39201): Replica was successfully acquired. DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [20347] [28/Oct/2020:23:29:53.588839136 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: ready_to_acquire_replica -> sending_updates DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [20531] [28/Oct/2020:23:29:53.592194307 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Checking consumer schema localcsn:5f9a372b000000000000 / remotecsn:5f9a3729000000000000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [20665] [28/Oct/2020:23:29:53.811947592 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Reread remotecsn:5f9a372b000000000000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [20780] [28/Oct/2020:23:29:53.815525594 -0400] - DEBUG - csngen_adjust_time - gen state before 5f9a37310001:1603942193:0:0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [20961] [28/Oct/2020:23:29:53.818486936 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-136-251:39201)): Consumer RUV: DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [21117] [28/Oct/2020:23:29:53.825178551 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replicageneration} 5f9a36f8000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [21330] [28/Oct/2020:23:29:53.830170393 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replica 
1 ldap://localhost.localdomain:39001} 5f9a36f8000100010000 5f9a372b000000010000 00000000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [21511] [28/Oct/2020:23:29:53.835569895 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-136-251:39201)): Supplier RUV: DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [21667] [28/Oct/2020:23:29:53.838740189 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replicageneration} 5f9a36f8000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [21880] [28/Oct/2020:23:29:53.843262798 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replica 1 ldap://localhost.localdomain:39001} 5f9a36f8000100010000 5f9a372b000000010000 5f9a372b DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [22176] [28/Oct/2020:23:29:53.847199877 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_initial_anchorcsn - agmt="cn=201" (ci-vm-10-0-136-251:39201) - (cscb 0 - state 1) - csnPrevMax () csnMax (5f9a372b000000010000) csnBuf (00000000000000000000) csnConsumerMax (5f9a372b000000010000) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [22300] [28/Oct/2020:23:29:53.850025363 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_load_buffer - rc=-30988 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [22572] [28/Oct/2020:23:29:53.852917213 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_return_buffer - session end: state=5 load=0 sent=0 skipped=0 skipped_new_rid=0 skipped_csn_gt_cons_maxcsn=0 skipped_up_to_date=0 skipped_csn_gt_ruv=0 skipped_csn_covered=0 DEBUG  
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [22721] [28/Oct/2020:23:29:53.856180196 -0400] - DEBUG - NSMMReplicationPlugin - send_updates - agmt="cn=201" (ci-vm-10-0-136-251:39201): No changes to send DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [22885] [28/Oct/2020:23:29:53.862568314 -0400] - DEBUG - NSMMReplicationPlugin - release_replica - agmt="cn=201" (ci-vm-10-0-136-251:39201): Successfully released consumer DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [23055] [28/Oct/2020:23:29:53.868145031 -0400] - DEBUG - NSMMReplicationPlugin - conn_start_linger -agmt="cn=201" (ci-vm-10-0-136-251:39201) - Beginning linger on the connection DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [23229] [28/Oct/2020:23:29:53.871827500 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: sending_updates -> wait_for_changes DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [23351] [28/Oct/2020:23:29:55.559767725 -0400] - DEBUG - _csngen_adjust_local_time - gen state before 5f9a37310001:1603942193:0:0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [23472] [28/Oct/2020:23:29:55.579346534 -0400] - DEBUG - _csngen_adjust_local_time - gen state after 5f9a37330000:1603942195:0:0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [23635] [28/Oct/2020:23:29:55.582970792 -0400] - DEBUG - NSMMReplicationPlugin - ruv_add_csn_inprogress - Successfully inserted csn 5f9a3733000000010000 into pending list DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [23815] [28/Oct/2020:23:29:55.591071075 -0400] - DEBUG - NSMMReplicationPlugin - 
purge_entry_state_information - From entry cn=test_entry,dc=example,dc=com up to CSN 5f90fcab000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [24060] [28/Oct/2020:23:29:55.597549633 -0400] - DEBUG - NSMMReplicationPlugin - write_changelog_and_ruv - Writing change for cn=test_entry,dc=example,dc=com (uniqid: e2746602-199611eb-bbbd91ac-3101a0f1, optype: 8) to changelog csn 5f9a3733000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [24235] [28/Oct/2020:23:29:55.601635985 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - cl5WriteOperationTxn - Successfully written entry with csn (5f9a3733000000010000) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [24373] [28/Oct/2020:23:29:55.606228050 -0400] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: committing all csns for csn 5f9a3733000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [24503] [28/Oct/2020:23:29:55.609301045 -0400] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: processing data csn 5f9a3733000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [24641] [28/Oct/2020:23:29:55.612457968 -0400] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Successfully committed csn 5f9a3733000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [24769] [28/Oct/2020:23:29:55.619165266 -0400] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Rolled up to csn 5f9a3733000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [24884] [28/Oct/2020:23:29:55.623345389 -0400] - DEBUG - replication - multimaster_mmr_postop - error 0 for operation 561. 
DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [25059] [28/Oct/2020:23:29:55.633724101 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: wait_for_changes -> wait_for_changes DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [25242] [28/Oct/2020:23:29:55.637121792 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: wait_for_changes -> ready_to_acquire_replica DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [25414] [28/Oct/2020:23:29:55.640309467 -0400] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-136-251:39201) - Canceling linger on the connection DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [25582] [28/Oct/2020:23:29:55.645167332 -0400] - DEBUG - NSMMReplicationPlugin - acquire_replica - agmt="cn=201" (ci-vm-10-0-136-251:39201): Replica was successfully acquired. 
DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [25764] [28/Oct/2020:23:29:55.650291436 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: ready_to_acquire_replica -> sending_updates DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [25879] [28/Oct/2020:23:29:55.656497790 -0400] - DEBUG - csngen_adjust_time - gen state before 5f9a37330002:1603942195:0:0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [26060] [28/Oct/2020:23:29:55.661067730 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-136-251:39201)): Consumer RUV: DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [26216] [28/Oct/2020:23:29:55.670105327 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replicageneration} 5f9a36f8000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [26429] [28/Oct/2020:23:29:55.674015250 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replica 1 ldap://localhost.localdomain:39001} 5f9a36f8000100010000 5f9a372b000000010000 00000000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [26610] [28/Oct/2020:23:29:55.677604481 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-136-251:39201)): Supplier RUV: DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [26766] [28/Oct/2020:23:29:55.680949576 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replicageneration} 5f9a36f8000000010000 DEBUG  
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [26979] [28/Oct/2020:23:29:55.684460032 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replica 1 ldap://localhost.localdomain:39001} 5f9a36f8000100010000 5f9a3733000000010000 5f9a3733 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [27141] [28/Oct/2020:23:29:55.687866196 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_get_buffer - found thread private buffer cache 0x7ff56003b300 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [27368] [28/Oct/2020:23:29:55.692405131 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_get_buffer - _pool is 0x7ff58e23f2e0 _pool->pl_busy_lists is 0x7ff560f05ae0 _pool->pl_busy_lists->bl_buffers is 0x7ff56003b300 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [27664] [28/Oct/2020:23:29:55.697115176 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_initial_anchorcsn - agmt="cn=201" (ci-vm-10-0-136-251:39201) - (cscb 0 - state 0) - csnPrevMax () csnMax (5f9a3733000000010000) csnBuf (00000000000000000000) csnConsumerMax (5f9a372b000000010000) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [27777] [28/Oct/2020:23:29:55.701187965 -0400] - DEBUG - clcache_initial_anchorcsn - anchor is now: 5f9a372b000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [27968] [28/Oct/2020:23:29:55.704867720 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - agmt="cn=201" (ci-vm-10-0-136-251:39201): CSN 5f9a372b000000010000 found, position set for replay DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [28124] [28/Oct/2020:23:29:55.708385086 -0400] - DEBUG - 
agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_get_next_change - load=1 rec=1 csn=5f9a3733000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [28236] [28/Oct/2020:23:29:55.713596572 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Starting DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [28368] [28/Oct/2020:23:29:55.719930439 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [28588] [28/Oct/2020:23:29:55.723639362 -0400] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-136-251:39201): Sending modify operation (dn="cn=test_entry,dc=example,dc=com" csn=5f9a3733000000010000) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [28720] [28/Oct/2020:23:29:55.727474763 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [28918] [28/Oct/2020:23:29:55.730915171 -0400] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-136-251:39201): Consumer successfully sent operation with csn 5f9a3733000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [29233] [28/Oct/2020:23:29:55.736921967 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_adjust_anchorcsn - agmt="cn=201" (ci-vm-10-0-136-251:39201) - (cscb 0 - state 1) - csnPrevMax (5f9a3733000000010000) csnMax (5f9a3733000000010000) csnBuf (5f9a3733000000010000) csnConsumerMax (5f9a3733000000010000) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [29357] 
[28/Oct/2020:23:29:55.740122648 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_load_buffer - rc=-30988 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [29541] [28/Oct/2020:23:29:55.743462024 -0400] - DEBUG - NSMMReplicationPlugin - send_updates - agmt="cn=201" (ci-vm-10-0-136-251:39201): No more updates to send (cl5GetNextOperationToReplay) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [29653] [28/Oct/2020:23:29:55.755103911 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 0 10 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [29785] [28/Oct/2020:23:29:55.759569340 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [29918] [28/Oct/2020:23:29:55.767553544 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 10 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [30048] [28/Oct/2020:23:29:55.770329315 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Result 3, 0, 0, 10, (null) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [30181] [28/Oct/2020:23:29:55.776289349 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 10 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [30314] [28/Oct/2020:23:29:55.782314788 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 10 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [30447] [28/Oct/2020:23:29:55.787665723 -0400] - DEBUG - 
NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 10 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [30580] [28/Oct/2020:23:29:55.795434783 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 10 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [30713] [28/Oct/2020:23:29:55.808757939 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 10 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [30846] [28/Oct/2020:23:29:55.832336260 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 10 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [30959] [28/Oct/2020:23:29:55.859715991 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 10 10 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [31068] [28/Oct/2020:23:29:55.869120666 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain exiting DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [31340] [28/Oct/2020:23:29:55.873403307 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_return_buffer - session end: state=5 load=1 sent=1 skipped=0 skipped_new_rid=0 skipped_csn_gt_cons_maxcsn=0 skipped_up_to_date=0 skipped_csn_gt_ruv=0 skipped_csn_covered=0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [31504] [28/Oct/2020:23:29:55.879918714 -0400] - DEBUG - NSMMReplicationPlugin - release_replica - agmt="cn=201" (ci-vm-10-0-136-251:39201): Successfully released consumer DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: 
[31674] [28/Oct/2020:23:29:55.883139446 -0400] - DEBUG - NSMMReplicationPlugin - conn_start_linger -agmt="cn=201" (ci-vm-10-0-136-251:39201) - Beginning linger on the connection DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [31848] [28/Oct/2020:23:29:55.886448644 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: sending_updates -> wait_for_changes DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [31956] [28/Oct/2020:23:29:56.999976491 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [32064] [28/Oct/2020:23:29:57.065747791 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [32186] [28/Oct/2020:23:29:57.082579301 -0400] - DEBUG - _csngen_adjust_local_time - gen state before 5f9a37330002:1603942195:0:0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [32307] [28/Oct/2020:23:29:57.087016749 -0400] - DEBUG - _csngen_adjust_local_time - gen state after 5f9a37350000:1603942197:0:0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [32470] [28/Oct/2020:23:29:57.091068604 -0400] - DEBUG - NSMMReplicationPlugin - ruv_add_csn_inprogress - Successfully inserted csn 5f9a3735000000010000 into pending list DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [32650] [28/Oct/2020:23:29:57.095035095 -0400] - DEBUG - NSMMReplicationPlugin - purge_entry_state_information - From entry cn=test_entry,dc=example,dc=com up to CSN 5f90fcb3000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 
_pattern_errorlog: [32895] [28/Oct/2020:23:29:57.099864740 -0400] - DEBUG - NSMMReplicationPlugin - write_changelog_and_ruv - Writing change for cn=test_entry,dc=example,dc=com (uniqid: e2746602-199611eb-bbbd91ac-3101a0f1, optype: 8) to changelog csn 5f9a3735000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [33070] [28/Oct/2020:23:29:57.103818005 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - cl5WriteOperationTxn - Successfully written entry with csn (5f9a3735000000010000) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [33208] [28/Oct/2020:23:29:57.107741114 -0400] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: committing all csns for csn 5f9a3735000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [33338] [28/Oct/2020:23:29:57.111993409 -0400] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: processing data csn 5f9a3735000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [33476] [28/Oct/2020:23:29:57.116241742 -0400] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Successfully committed csn 5f9a3735000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [33604] [28/Oct/2020:23:29:57.120156395 -0400] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Rolled up to csn 5f9a3735000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [33719] [28/Oct/2020:23:29:57.126568417 -0400] - DEBUG - replication - multimaster_mmr_postop - error 0 for operation 561. 
DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [33894] [28/Oct/2020:23:29:57.132615014 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: wait_for_changes -> wait_for_changes DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [34077] [28/Oct/2020:23:29:57.136597364 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: wait_for_changes -> ready_to_acquire_replica DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [34249] [28/Oct/2020:23:29:57.140603432 -0400] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-136-251:39201) - Canceling linger on the connection DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [34417] [28/Oct/2020:23:29:57.147602744 -0400] - DEBUG - NSMMReplicationPlugin - acquire_replica - agmt="cn=201" (ci-vm-10-0-136-251:39201): Replica was successfully acquired. 
DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [34599] [28/Oct/2020:23:29:57.151239896 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: ready_to_acquire_replica -> sending_updates DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [34783] [28/Oct/2020:23:29:57.154661766 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Checking consumer schema localcsn:5f9a3735000000000000 / remotecsn:5f9a372b000000000000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [34917] [28/Oct/2020:23:29:57.455007667 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Reread remotecsn:5f9a372b000000000000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [35102] [28/Oct/2020:23:29:57.459278704 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - Schema checking successful: ok to push the schema (agmt="cn=201" (ci-vm-10-0-136-251:39201)) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [35217] [28/Oct/2020:23:29:57.650583155 -0400] - DEBUG - csngen_adjust_time - gen state before 5f9a37350002:1603942197:0:0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [35398] [28/Oct/2020:23:29:57.656736614 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-136-251:39201)): Consumer RUV: DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [35554] [28/Oct/2020:23:29:57.660887996 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replicageneration} 5f9a36f8000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: 
[35767] [28/Oct/2020:23:29:57.664333614 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replica 1 ldap://localhost.localdomain:39001} 5f9a36f8000100010000 5f9a3733000000010000 00000000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [35948] [28/Oct/2020:23:29:57.667853418 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-136-251:39201)): Supplier RUV: DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [36104] [28/Oct/2020:23:29:57.674458976 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replicageneration} 5f9a36f8000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [36317] [28/Oct/2020:23:29:57.678571143 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replica 1 ldap://localhost.localdomain:39001} 5f9a36f8000100010000 5f9a3735000000010000 5f9a3735 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [36479] [28/Oct/2020:23:29:57.685114795 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_get_buffer - found thread private buffer cache 0x7ff56003b300 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [36706] [28/Oct/2020:23:29:57.689212237 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_get_buffer - _pool is 0x7ff58e23f2e0 _pool->pl_busy_lists is 0x7ff560f05ae0 _pool->pl_busy_lists->bl_buffers is 0x7ff56003b300 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [37002] [28/Oct/2020:23:29:57.692954056 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_initial_anchorcsn - agmt="cn=201" (ci-vm-10-0-136-251:39201) - (cscb 0 - state 0) - 
csnPrevMax () csnMax (5f9a3735000000010000) csnBuf (5f9a3733000000010000) csnConsumerMax (5f9a3733000000010000) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [37115] [28/Oct/2020:23:29:57.696847077 -0400] - DEBUG - clcache_initial_anchorcsn - anchor is now: 5f9a3733000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [37306] [28/Oct/2020:23:29:57.702662785 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - agmt="cn=201" (ci-vm-10-0-136-251:39201): CSN 5f9a3733000000010000 found, position set for replay DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [37462] [28/Oct/2020:23:29:57.706316560 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_get_next_change - load=1 rec=1 csn=5f9a3735000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [37574] [28/Oct/2020:23:29:57.709880755 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Starting DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [37706] [28/Oct/2020:23:29:57.714814796 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [37926] [28/Oct/2020:23:29:57.718512662 -0400] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-136-251:39201): Sending modify operation (dn="cn=test_entry,dc=example,dc=com" csn=5f9a3735000000010000) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [38058] [28/Oct/2020:23:29:57.722295026 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 0 DEBUG  
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [38256] [28/Oct/2020:23:29:57.728499814 -0400] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-136-251:39201): Consumer successfully sent operation with csn 5f9a3735000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [38571] [28/Oct/2020:23:29:57.732174723 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_adjust_anchorcsn - agmt="cn=201" (ci-vm-10-0-136-251:39201) - (cscb 0 - state 1) - csnPrevMax (5f9a3735000000010000) csnMax (5f9a3735000000010000) csnBuf (5f9a3735000000010000) csnConsumerMax (5f9a3735000000010000) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [38695] [28/Oct/2020:23:29:57.736789267 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_load_buffer - rc=-30988 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [38879] [28/Oct/2020:23:29:57.745571245 -0400] - DEBUG - NSMMReplicationPlugin - send_updates - agmt="cn=201" (ci-vm-10-0-136-251:39201): No more updates to send (cl5GetNextOperationToReplay) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [38991] [28/Oct/2020:23:29:57.751110276 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 0 17 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [39124] [28/Oct/2020:23:29:57.754671696 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 17 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [39254] [28/Oct/2020:23:29:57.758574921 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Result 3, 0, 0, 17, (null) DEBUG  
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [39387] [28/Oct/2020:23:29:57.762996105 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 17 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [39520] [28/Oct/2020:23:29:57.769576336 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 17 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [39653] [28/Oct/2020:23:29:57.779697913 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 17 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [39786] [28/Oct/2020:23:29:57.790108790 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 17 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [39919] [28/Oct/2020:23:29:57.803248692 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 17 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [40052] [28/Oct/2020:23:29:57.827710534 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 17 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [40165] [28/Oct/2020:23:29:57.854866047 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 17 17 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [40274] [28/Oct/2020:23:29:57.868378823 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain exiting DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [40546] 
[28/Oct/2020:23:29:57.879439205 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_return_buffer - session end: state=5 load=1 sent=1 skipped=0 skipped_new_rid=0 skipped_csn_gt_cons_maxcsn=0 skipped_up_to_date=0 skipped_csn_gt_ruv=0 skipped_csn_covered=0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [40710] [28/Oct/2020:23:29:57.886699315 -0400] - DEBUG - NSMMReplicationPlugin - release_replica - agmt="cn=201" (ci-vm-10-0-136-251:39201): Successfully released consumer DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [40880] [28/Oct/2020:23:29:57.891960662 -0400] - DEBUG - NSMMReplicationPlugin - conn_start_linger -agmt="cn=201" (ci-vm-10-0-136-251:39201) - Beginning linger on the connection DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [41054] [28/Oct/2020:23:29:57.895446242 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: sending_updates -> wait_for_changes DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [41162] [28/Oct/2020:23:29:58.138282347 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. 
DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [41333] [28/Oct/2020:23:29:58.147215041 -0400] - DEBUG - NSMMReplicationPlugin - agmt_set_enabled_from_entry: agreement is now disabled (agmt="cn=201" (ci-vm-10-0-136-251:39201)) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [41508] [28/Oct/2020:23:29:58.151254047 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: wait_for_changes -> wait_for_changes DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [41683] [28/Oct/2020:23:29:58.155192927 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: wait_for_changes -> wait_for_changes DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [41855] [28/Oct/2020:23:29:58.158305228 -0400] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-136-251:39201) - Canceling linger on the connection DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [42030] [28/Oct/2020:23:29:58.161805620 -0400] - DEBUG - NSMMReplicationPlugin - close_connection_internal - agmt="cn=201" (ci-vm-10-0-136-251:39201) - Disconnected from the consumer DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [42195] [28/Oct/2020:23:29:58.251413180 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_stop - agmt="cn=201" (ci-vm-10-0-136-251:39201): Protocol stopped after 0 seconds DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [42323] [28/Oct/2020:23:29:58.256009254 -0400] - DEBUG - NSMMReplicationPlugin - Database RUV: {replicageneration} 5f9a36f8000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 
_pattern_errorlog: [42508] [28/Oct/2020:23:29:58.259340629 -0400] - DEBUG - NSMMReplicationPlugin - Database RUV: {replica 1 ldap://localhost.localdomain:39001} 5f9a36f8000100010000 5f9a3735000000010000 5f9a3735 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [42683] [28/Oct/2020:23:29:58.262896050 -0400] - DEBUG - NSMMReplicationPlugin - close_connection_internal - agmt="cn=201" (ci-vm-10-0-136-251:39201) - Disconnected from the consumer DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [42791] [28/Oct/2020:23:29:58.268960230 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [42899] [28/Oct/2020:23:29:58.895741152 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [43014] [28/Oct/2020:23:29:58.905587047 -0400] - DEBUG - replication - multimaster_mmr_postop - error 0 for operation 561. DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [43122] [28/Oct/2020:23:30:03.287853122 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. 
DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [43292] [28/Oct/2020:23:30:03.293619827 -0400] - DEBUG - NSMMReplicationPlugin - agmt_set_enabled_from_entry: agreement is now enabled (agmt="cn=201" (ci-vm-10-0-136-251:39201)) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [43467] [28/Oct/2020:23:30:03.297993450 -0400] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-136-251:39201) - No linger to cancel on the connection DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [43642] [28/Oct/2020:23:30:03.301777794 -0400] - DEBUG - NSMMReplicationPlugin - close_connection_internal - agmt="cn=201" (ci-vm-10-0-136-251:39201) - Disconnected from the consumer DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [43814] [28/Oct/2020:23:30:03.306233186 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: start -> ready_to_acquire_replica DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [43983] [28/Oct/2020:23:30:03.309861493 -0400] - DEBUG - NSMMReplicationPlugin - conn_connect - agmt="cn=201" (ci-vm-10-0-136-251:39201) - Trying non-secure slapi_ldap_init_ext DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [44511] [28/Oct/2020:23:30:03.313801072 -0400] - DEBUG - NSMMReplicationPlugin - conn_connect - agmt="cn=201" (ci-vm-10-0-136-251:39201) - binddn = cn=ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701,ou=services,dc=example,dc=com, passwd = 
{AES-TUhNR0NTcUdTSWIzRFFFRkRUQm1NRVVHQ1NxR1NJYjNEUUVGRERBNEJDUmtaVFV5WlRsbVl5MWtORGhrTTJJdw0KTlMxaFl6VTNNamcwT1Mxak1XVm1aVEEyTlFBQ0FRSUNBU0F3Q2dZSUtvWklodmNOQWdjd0hRWUpZSVpJQVdVRA0KQkFFcUJCRDhpTkdqOTI2RVV0eS9mSGtIMW1INw==}1aP0CGo7VTyXKYAH9mQPJxPbmZao4qtbDnejHuqj6hueREUbjTmpIFJ9v+wKLJM0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [44556] hwtXYLylnARfBAm0SlP5kfwhZ0UpujEuyyXSDnqmiaE= DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [44731] [28/Oct/2020:23:30:03.318794801 -0400] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-136-251:39201) - No linger to cancel on the connection DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [44853] [28/Oct/2020:23:30:03.324110852 -0400] - DEBUG - _csngen_adjust_local_time - gen state before 5f9a37350002:1603942197:0:0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [44974] [28/Oct/2020:23:30:03.326817004 -0400] - DEBUG - _csngen_adjust_local_time - gen state after 5f9a373b0000:1603942203:0:0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [45142] [28/Oct/2020:23:30:03.336767488 -0400] - DEBUG - NSMMReplicationPlugin - acquire_replica - agmt="cn=201" (ci-vm-10-0-136-251:39201): Replica was successfully acquired. 
DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [45324] [28/Oct/2020:23:30:03.343806633 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: ready_to_acquire_replica -> sending_updates DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [45508] [28/Oct/2020:23:30:03.351800496 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Checking consumer schema localcsn:5f9a3735000000000000 / remotecsn:5f9a372b000000000000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [45642] [28/Oct/2020:23:30:03.558591605 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Reread remotecsn:5f9a3735000000000000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [45757] [28/Oct/2020:23:30:03.563111258 -0400] - DEBUG - csngen_adjust_time - gen state before 5f9a373b0001:1603942203:0:0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [45938] [28/Oct/2020:23:30:03.567314677 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-136-251:39201)): Consumer RUV: DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [46094] [28/Oct/2020:23:30:03.571198595 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replicageneration} 5f9a36f8000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [46307] [28/Oct/2020:23:30:03.575102711 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replica 1 ldap://localhost.localdomain:39001} 5f9a36f8000100010000 5f9a3735000000010000 00000000 DEBUG  
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [46488] [28/Oct/2020:23:30:03.579135503 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-136-251:39201)): Supplier RUV: DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [46644] [28/Oct/2020:23:30:03.583233705 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replicageneration} 5f9a36f8000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [46857] [28/Oct/2020:23:30:03.587761589 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replica 1 ldap://localhost.localdomain:39001} 5f9a36f8000100010000 5f9a3735000000010000 5f9a3735 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [47153] [28/Oct/2020:23:30:03.592143797 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_initial_anchorcsn - agmt="cn=201" (ci-vm-10-0-136-251:39201) - (cscb 0 - state 1) - csnPrevMax () csnMax (5f9a3735000000010000) csnBuf (00000000000000000000) csnConsumerMax (5f9a3735000000010000) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [47277] [28/Oct/2020:23:30:03.595846566 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_load_buffer - rc=-30988 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [47549] [28/Oct/2020:23:30:03.600007444 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_return_buffer - session end: state=5 load=0 sent=0 skipped=0 skipped_new_rid=0 skipped_csn_gt_cons_maxcsn=0 skipped_up_to_date=0 skipped_csn_gt_ruv=0 skipped_csn_covered=0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [47698] 
[28/Oct/2020:23:30:03.604945698 -0400] - DEBUG - NSMMReplicationPlugin - send_updates - agmt="cn=201" (ci-vm-10-0-136-251:39201): No changes to send DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [47862] [28/Oct/2020:23:30:03.627372001 -0400] - DEBUG - NSMMReplicationPlugin - release_replica - agmt="cn=201" (ci-vm-10-0-136-251:39201): Successfully released consumer DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [48032] [28/Oct/2020:23:30:03.630966914 -0400] - DEBUG - NSMMReplicationPlugin - conn_start_linger -agmt="cn=201" (ci-vm-10-0-136-251:39201) - Beginning linger on the connection DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [48206] [28/Oct/2020:23:30:03.634052039 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: sending_updates -> wait_for_changes DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [48328] [28/Oct/2020:23:30:05.306869598 -0400] - DEBUG - _csngen_adjust_local_time - gen state before 5f9a373b0001:1603942203:0:0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [48449] [28/Oct/2020:23:30:05.311396289 -0400] - DEBUG - _csngen_adjust_local_time - gen state after 5f9a373d0000:1603942205:0:0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [48612] [28/Oct/2020:23:30:05.320447237 -0400] - DEBUG - NSMMReplicationPlugin - ruv_add_csn_inprogress - Successfully inserted csn 5f9a373d000000010000 into pending list DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [48792] [28/Oct/2020:23:30:05.324354233 -0400] - DEBUG - NSMMReplicationPlugin - purge_entry_state_information - From entry cn=test_entry,dc=example,dc=com up to CSN 5f90fcb5000000010000 
DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [49037] [28/Oct/2020:23:30:05.329095479 -0400] - DEBUG - NSMMReplicationPlugin - write_changelog_and_ruv - Writing change for cn=test_entry,dc=example,dc=com (uniqid: e2746602-199611eb-bbbd91ac-3101a0f1, optype: 8) to changelog csn 5f9a373d000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [49212] [28/Oct/2020:23:30:05.333139727 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - cl5WriteOperationTxn - Successfully written entry with csn (5f9a373d000000010000) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [49350] [28/Oct/2020:23:30:05.336925469 -0400] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: committing all csns for csn 5f9a373d000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [49480] [28/Oct/2020:23:30:05.340855360 -0400] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: processing data csn 5f9a373d000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [49618] [28/Oct/2020:23:30:05.344901517 -0400] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Successfully committed csn 5f9a373d000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [49746] [28/Oct/2020:23:30:05.348487331 -0400] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Rolled up to csn 5f9a373d000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [49861] [28/Oct/2020:23:30:05.393822714 -0400] - DEBUG - replication - multimaster_mmr_postop - error 0 for operation 561. 
DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [50036] [28/Oct/2020:23:30:05.402260708 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: wait_for_changes -> wait_for_changes DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [50219] [28/Oct/2020:23:30:05.406735863 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: wait_for_changes -> ready_to_acquire_replica DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [50391] [28/Oct/2020:23:30:05.411415349 -0400] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-136-251:39201) - Canceling linger on the connection DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [50559] [28/Oct/2020:23:30:05.416126781 -0400] - DEBUG - NSMMReplicationPlugin - acquire_replica - agmt="cn=201" (ci-vm-10-0-136-251:39201): Replica was successfully acquired. 
DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [50741] [28/Oct/2020:23:30:05.419853107 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: ready_to_acquire_replica -> sending_updates DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [50856] [28/Oct/2020:23:30:05.423770082 -0400] - DEBUG - csngen_adjust_time - gen state before 5f9a373d0002:1603942205:0:0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [51037] [28/Oct/2020:23:30:05.427194394 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-136-251:39201)): Consumer RUV: DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [51193] [28/Oct/2020:23:30:05.430298948 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replicageneration} 5f9a36f8000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [51406] [28/Oct/2020:23:30:05.433549004 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replica 1 ldap://localhost.localdomain:39001} 5f9a36f8000100010000 5f9a3735000000010000 00000000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [51587] [28/Oct/2020:23:30:05.440186678 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-136-251:39201)): Supplier RUV: DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [51743] [28/Oct/2020:23:30:05.443692148 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replicageneration} 5f9a36f8000000010000 DEBUG  
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [51956] [28/Oct/2020:23:30:05.447169100 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replica 1 ldap://localhost.localdomain:39001} 5f9a36f8000100010000 5f9a373d000000010000 5f9a373d DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [52118] [28/Oct/2020:23:30:05.450605320 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_get_buffer - found thread private buffer cache 0x7ff56003b400 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [52345] [28/Oct/2020:23:30:05.454270973 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_get_buffer - _pool is 0x7ff58e23f2e0 _pool->pl_busy_lists is 0x7ff560f05ae0 _pool->pl_busy_lists->bl_buffers is 0x7ff56003b400 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [52641] [28/Oct/2020:23:30:05.458172147 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_initial_anchorcsn - agmt="cn=201" (ci-vm-10-0-136-251:39201) - (cscb 0 - state 0) - csnPrevMax () csnMax (5f9a373d000000010000) csnBuf (00000000000000000000) csnConsumerMax (5f9a3735000000010000) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [52754] [28/Oct/2020:23:30:05.463995793 -0400] - DEBUG - clcache_initial_anchorcsn - anchor is now: 5f9a3735000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [52945] [28/Oct/2020:23:30:05.467774248 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - agmt="cn=201" (ci-vm-10-0-136-251:39201): CSN 5f9a3735000000010000 found, position set for replay DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [53101] [28/Oct/2020:23:30:05.471591745 -0400] - DEBUG - 
agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_get_next_change - load=1 rec=1 csn=5f9a373d000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [53213] [28/Oct/2020:23:30:05.475522911 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Starting DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [53345] [28/Oct/2020:23:30:05.479665166 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [53565] [28/Oct/2020:23:30:05.484556411 -0400] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-136-251:39201): Sending modify operation (dn="cn=test_entry,dc=example,dc=com" csn=5f9a373d000000010000) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [53763] [28/Oct/2020:23:30:05.488287149 -0400] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-136-251:39201): Consumer successfully sent operation with csn 5f9a373d000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [53895] [28/Oct/2020:23:30:05.492781913 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [54210] [28/Oct/2020:23:30:05.497016625 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_adjust_anchorcsn - agmt="cn=201" (ci-vm-10-0-136-251:39201) - (cscb 0 - state 1) - csnPrevMax (5f9a373d000000010000) csnMax (5f9a373d000000010000) csnBuf (5f9a373d000000010000) csnConsumerMax (5f9a373d000000010000) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [54334] 
[28/Oct/2020:23:30:05.501430645 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_load_buffer - rc=-30988 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [54518] [28/Oct/2020:23:30:05.505324995 -0400] - DEBUG - NSMMReplicationPlugin - send_updates - agmt="cn=201" (ci-vm-10-0-136-251:39201): No more updates to send (cl5GetNextOperationToReplay) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [54630] [28/Oct/2020:23:30:05.508458488 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 0 10 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [54763] [28/Oct/2020:23:30:05.511821335 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 10 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [54893] [28/Oct/2020:23:30:05.518231135 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Result 3, 0, 0, 10, (null) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [55026] [28/Oct/2020:23:30:05.522296643 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 10 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [55159] [28/Oct/2020:23:30:05.528918922 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 10 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [55292] [28/Oct/2020:23:30:05.534810284 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 10 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [55425] [28/Oct/2020:23:30:05.543494044 -0400] - DEBUG - 
NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 10 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [55558] [28/Oct/2020:23:30:05.556865710 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 10 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [55691] [28/Oct/2020:23:30:05.576924247 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 10 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [55804] [28/Oct/2020:23:30:05.611822815 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 10 10 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [55913] [28/Oct/2020:23:30:05.615821200 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain exiting DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [56185] [28/Oct/2020:23:30:05.619626570 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_return_buffer - session end: state=5 load=1 sent=1 skipped=0 skipped_new_rid=0 skipped_csn_gt_cons_maxcsn=0 skipped_up_to_date=0 skipped_csn_gt_ruv=0 skipped_csn_covered=0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [56349] [28/Oct/2020:23:30:05.627081988 -0400] - DEBUG - NSMMReplicationPlugin - release_replica - agmt="cn=201" (ci-vm-10-0-136-251:39201): Successfully released consumer DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [56519] [28/Oct/2020:23:30:05.630407035 -0400] - DEBUG - NSMMReplicationPlugin - conn_start_linger -agmt="cn=201" (ci-vm-10-0-136-251:39201) - Beginning linger on the connection DEBUG  
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [56693] [28/Oct/2020:23:30:05.634236549 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: sending_updates -> wait_for_changes DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [56693] DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:67 _pattern_errorlog: end at offset 56693
Passed suites/schema/schema_replication_test.py::test_schema_replication_seven 9.79
-------------------------------Captured log call--------------------------------
INFO  lib389:schema_replication_test.py:41 ############################################### INFO  lib389:schema_replication_test.py:42 ####### INFO  lib389:schema_replication_test.py:43 ####### Same OC - extra MAY: Schema is pushed - no error INFO  lib389:schema_replication_test.py:44 ####### INFO  lib389:schema_replication_test.py:45 ################################################### DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive b'12' (expected 13) INFO  lib389:agreement.py:1194 Pausing replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive b'13' (expected 14) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:562 test_schema_replication_seven master_schema_csn=b'5f9a373e000000000000' DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:563 ctest_schema_replication_seven consumer_schema_csn=b'5f9a373e000000000000' DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:55 _pattern_errorlog: start at offset 56694 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [56801] 28/Oct/2020:23:30:06.722200849 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [56909] [28/Oct/2020:23:30:06.820262567 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. 
DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [57031] [28/Oct/2020:23:30:06.835892224 -0400] - DEBUG - _csngen_adjust_local_time - gen state before 5f9a373d0002:1603942205:0:0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [57152] [28/Oct/2020:23:30:06.841350909 -0400] - DEBUG - _csngen_adjust_local_time - gen state after 5f9a373e0000:1603942206:0:0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [57315] [28/Oct/2020:23:30:06.845510991 -0400] - DEBUG - NSMMReplicationPlugin - ruv_add_csn_inprogress - Successfully inserted csn 5f9a373e000000010000 into pending list DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [57495] [28/Oct/2020:23:30:06.850168741 -0400] - DEBUG - NSMMReplicationPlugin - purge_entry_state_information - From entry cn=test_entry,dc=example,dc=com up to CSN 5f90fcbd000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [57740] [28/Oct/2020:23:30:06.856876434 -0400] - DEBUG - NSMMReplicationPlugin - write_changelog_and_ruv - Writing change for cn=test_entry,dc=example,dc=com (uniqid: e2746602-199611eb-bbbd91ac-3101a0f1, optype: 8) to changelog csn 5f9a373e000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [57915] [28/Oct/2020:23:30:06.860618707 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - cl5WriteOperationTxn - Successfully written entry with csn (5f9a373e000000010000) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [58053] [28/Oct/2020:23:30:06.867509227 -0400] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: committing all csns for csn 5f9a373e000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: 
[58183] [28/Oct/2020:23:30:06.872507419 -0400] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: processing data csn 5f9a373e000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [58321] [28/Oct/2020:23:30:06.882818340 -0400] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Successfully committed csn 5f9a373e000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [58449] [28/Oct/2020:23:30:06.887136309 -0400] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Rolled up to csn 5f9a373e000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [58564] [28/Oct/2020:23:30:06.896649181 -0400] - DEBUG - replication - multimaster_mmr_postop - error 0 for operation 561. DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [58739] [28/Oct/2020:23:30:06.902033895 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: wait_for_changes -> wait_for_changes DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [58922] [28/Oct/2020:23:30:07.001658209 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: wait_for_changes -> ready_to_acquire_replica DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [59094] [28/Oct/2020:23:30:07.007827637 -0400] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-136-251:39201) - Canceling linger on the connection DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [59216] [28/Oct/2020:23:30:07.011937039 -0400] - DEBUG - _csngen_adjust_local_time - gen state before 5f9a373e0001:1603942206:0:0 DEBUG  
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [59337] [28/Oct/2020:23:30:07.015409778 -0400] - DEBUG - _csngen_adjust_local_time - gen state after 5f9a373f0000:1603942207:0:0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [59505] [28/Oct/2020:23:30:07.019934297 -0400] - DEBUG - NSMMReplicationPlugin - acquire_replica - agmt="cn=201" (ci-vm-10-0-136-251:39201): Replica was successfully acquired. DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [59687] [28/Oct/2020:23:30:07.023356666 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: ready_to_acquire_replica -> sending_updates DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [59871] [28/Oct/2020:23:30:07.026353518 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Checking consumer schema localcsn:5f9a373e000000000000 / remotecsn:5f9a3735000000000000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [60034] [28/Oct/2020:23:30:07.168867983 -0400] - DEBUG - schema_oc_compare_strict - Attribute postOfficeBox is not allowed in 'masterNewOCA' of the remote consumer schema DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [60168] [28/Oct/2020:23:30:07.249448215 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Reread remotecsn:5f9a3735000000000000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [60353] [28/Oct/2020:23:30:07.253231828 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - Schema checking successful: ok to push the schema (agmt="cn=201" (ci-vm-10-0-136-251:39201)) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [60468] 
[28/Oct/2020:23:30:07.448113210 -0400] - DEBUG - csngen_adjust_time - gen state before 5f9a373f0001:1603942207:0:0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [60649] [28/Oct/2020:23:30:07.454702884 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-136-251:39201)): Consumer RUV: DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [60805] [28/Oct/2020:23:30:07.457678155 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replicageneration} 5f9a36f8000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [61018] [28/Oct/2020:23:30:07.460637818 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replica 1 ldap://localhost.localdomain:39001} 5f9a36f8000100010000 5f9a373d000000010000 00000000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [61199] [28/Oct/2020:23:30:07.463629005 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-136-251:39201)): Supplier RUV: DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [61355] [28/Oct/2020:23:30:07.467767406 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replicageneration} 5f9a36f8000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [61568] [28/Oct/2020:23:30:07.473212225 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replica 1 ldap://localhost.localdomain:39001} 5f9a36f8000100010000 5f9a373e000000010000 5f9a373e DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [61730] 
[28/Oct/2020:23:30:07.476172791 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_get_buffer - found thread private buffer cache 0x7ff56003b400 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [61957] [28/Oct/2020:23:30:07.478839422 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_get_buffer - _pool is 0x7ff58e23f2e0 _pool->pl_busy_lists is 0x7ff560f05ae0 _pool->pl_busy_lists->bl_buffers is 0x7ff56003b400 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [62253] [28/Oct/2020:23:30:07.485276402 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_initial_anchorcsn - agmt="cn=201" (ci-vm-10-0-136-251:39201) - (cscb 0 - state 0) - csnPrevMax () csnMax (5f9a373e000000010000) csnBuf (5f9a373d000000010000) csnConsumerMax (5f9a373d000000010000) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [62366] [28/Oct/2020:23:30:07.488566705 -0400] - DEBUG - clcache_initial_anchorcsn - anchor is now: 5f9a373d000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [62557] [28/Oct/2020:23:30:07.491545209 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - agmt="cn=201" (ci-vm-10-0-136-251:39201): CSN 5f9a373d000000010000 found, position set for replay DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [62713] [28/Oct/2020:23:30:07.495888908 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_get_next_change - load=1 rec=1 csn=5f9a373e000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [62933] [28/Oct/2020:23:30:07.499671382 -0400] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-136-251:39201): Sending modify operation (dn="cn=test_entry,dc=example,dc=com" 
csn=5f9a373e000000010000) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [63045] [28/Oct/2020:23:30:07.502356208 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Starting DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [63243] [28/Oct/2020:23:30:07.507301537 -0400] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-136-251:39201): Consumer successfully sent operation with csn 5f9a373e000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [63375] [28/Oct/2020:23:30:07.510186133 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [63690] [28/Oct/2020:23:30:07.513640612 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_adjust_anchorcsn - agmt="cn=201" (ci-vm-10-0-136-251:39201) - (cscb 0 - state 1) - csnPrevMax (5f9a373e000000010000) csnMax (5f9a373e000000010000) csnBuf (5f9a373e000000010000) csnConsumerMax (5f9a373e000000010000) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [63814] [28/Oct/2020:23:30:07.516995100 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_load_buffer - rc=-30988 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [63998] [28/Oct/2020:23:30:07.520385587 -0400] - DEBUG - NSMMReplicationPlugin - send_updates - agmt="cn=201" (ci-vm-10-0-136-251:39201): No more updates to send (cl5GetNextOperationToReplay) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [64110] [28/Oct/2020:23:30:07.523627937 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 0 17 DEBUG  
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [64243] [28/Oct/2020:23:30:07.526164237 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 17 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [64373] [28/Oct/2020:23:30:07.529577702 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Result 3, 0, 0, 17, (null) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [64506] [28/Oct/2020:23:30:07.532535782 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 17 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [64639] [28/Oct/2020:23:30:07.536284424 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 17 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [64772] [28/Oct/2020:23:30:07.541941279 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 17 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [64905] [28/Oct/2020:23:30:07.550982390 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 17 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [65038] [28/Oct/2020:23:30:07.562367770 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 17 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [65171] [28/Oct/2020:23:30:07.581148947 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 17 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 
_pattern_errorlog: [65304] [28/Oct/2020:23:30:07.616583772 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 17 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [65417] [28/Oct/2020:23:30:07.710929591 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 17 17 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [65526] [28/Oct/2020:23:30:07.774783320 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain exiting DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [65798] [28/Oct/2020:23:30:07.778130017 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_return_buffer - session end: state=5 load=1 sent=1 skipped=0 skipped_new_rid=0 skipped_csn_gt_cons_maxcsn=0 skipped_up_to_date=0 skipped_csn_gt_ruv=0 skipped_csn_covered=0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [65962] [28/Oct/2020:23:30:07.785423087 -0400] - DEBUG - NSMMReplicationPlugin - release_replica - agmt="cn=201" (ci-vm-10-0-136-251:39201): Successfully released consumer DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [66132] [28/Oct/2020:23:30:07.788632560 -0400] - DEBUG - NSMMReplicationPlugin - conn_start_linger -agmt="cn=201" (ci-vm-10-0-136-251:39201) - Beginning linger on the connection DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [66306] [28/Oct/2020:23:30:07.792288631 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: sending_updates -> wait_for_changes DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [66414] [28/Oct/2020:23:30:07.908164326 -0400] - DEBUG - replication - 
copy_operation_parameters - replica is null. DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [66585] [28/Oct/2020:23:30:07.911940986 -0400] - DEBUG - NSMMReplicationPlugin - agmt_set_enabled_from_entry: agreement is now disabled (agmt="cn=201" (ci-vm-10-0-136-251:39201)) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [66760] [28/Oct/2020:23:30:07.915411659 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: wait_for_changes -> wait_for_changes DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [66935] [28/Oct/2020:23:30:07.918906837 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: wait_for_changes -> wait_for_changes DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [67107] [28/Oct/2020:23:30:07.922627130 -0400] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-136-251:39201) - Canceling linger on the connection DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [67282] [28/Oct/2020:23:30:07.926229527 -0400] - DEBUG - NSMMReplicationPlugin - close_connection_internal - agmt="cn=201" (ci-vm-10-0-136-251:39201) - Disconnected from the consumer DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [67447] [28/Oct/2020:23:30:08.015185499 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_stop - agmt="cn=201" (ci-vm-10-0-136-251:39201): Protocol stopped after 0 seconds DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [67575] [28/Oct/2020:23:30:08.018262355 -0400] - DEBUG - NSMMReplicationPlugin - Database RUV: {replicageneration} 5f9a36f8000000010000 DEBUG  
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [67760] [28/Oct/2020:23:30:08.020956740 -0400] - DEBUG - NSMMReplicationPlugin - Database RUV: {replica 1 ldap://localhost.localdomain:39001} 5f9a36f8000100010000 5f9a373e000000010000 5f9a373e DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [67935] [28/Oct/2020:23:30:08.023622610 -0400] - DEBUG - NSMMReplicationPlugin - close_connection_internal - agmt="cn=201" (ci-vm-10-0-136-251:39201) - Disconnected from the consumer DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [68043] [28/Oct/2020:23:30:08.026181059 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [68151] [28/Oct/2020:23:30:13.042225352 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [68321] [28/Oct/2020:23:30:13.117382185 -0400] - DEBUG - NSMMReplicationPlugin - agmt_set_enabled_from_entry: agreement is now enabled (agmt="cn=201" (ci-vm-10-0-136-251:39201)) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [68496] [28/Oct/2020:23:30:13.121491430 -0400] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-136-251:39201) - No linger to cancel on the connection DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [68671] [28/Oct/2020:23:30:13.125167079 -0400] - DEBUG - NSMMReplicationPlugin - close_connection_internal - agmt="cn=201" (ci-vm-10-0-136-251:39201) - Disconnected from the consumer DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [68843] [28/Oct/2020:23:30:13.128655973 -0400] - DEBUG 
- NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: start -> ready_to_acquire_replica DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [69012] [28/Oct/2020:23:30:13.132833895 -0400] - DEBUG - NSMMReplicationPlugin - conn_connect - agmt="cn=201" (ci-vm-10-0-136-251:39201) - Trying non-secure slapi_ldap_init_ext DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [69540] [28/Oct/2020:23:30:13.141870329 -0400] - DEBUG - NSMMReplicationPlugin - conn_connect - agmt="cn=201" (ci-vm-10-0-136-251:39201) - binddn = cn=ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701,ou=services,dc=example,dc=com, passwd = {AES-TUhNR0NTcUdTSWIzRFFFRkRUQm1NRVVHQ1NxR1NJYjNEUUVGRERBNEJDUmtaVFV5WlRsbVl5MWtORGhrTTJJdw0KTlMxaFl6VTNNamcwT1Mxak1XVm1aVEEyTlFBQ0FRSUNBU0F3Q2dZSUtvWklodmNOQWdjd0hRWUpZSVpJQVdVRA0KQkFFcUJCRDhpTkdqOTI2RVV0eS9mSGtIMW1INw==}1aP0CGo7VTyXKYAH9mQPJxPbmZao4qtbDnejHuqj6hueREUbjTmpIFJ9v+wKLJM0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [69585] hwtXYLylnARfBAm0SlP5kfwhZ0UpujEuyyXSDnqmiaE= DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [69760] [28/Oct/2020:23:30:13.146719212 -0400] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-136-251:39201) - No linger to cancel on the connection DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [69882] [28/Oct/2020:23:30:13.153158848 -0400] - DEBUG - _csngen_adjust_local_time - gen state before 5f9a373f0001:1603942207:0:0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [70003] [28/Oct/2020:23:30:13.156250436 -0400] - DEBUG - _csngen_adjust_local_time - gen state after 5f9a37450000:1603942213:0:0 DEBUG  
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [70171] [28/Oct/2020:23:30:13.160353072 -0400] - DEBUG - NSMMReplicationPlugin - acquire_replica - agmt="cn=201" (ci-vm-10-0-136-251:39201): Replica was successfully acquired. DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [70353] [28/Oct/2020:23:30:13.163239107 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: ready_to_acquire_replica -> sending_updates DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [70537] [28/Oct/2020:23:30:13.166183989 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Checking consumer schema localcsn:5f9a373e000000000000 / remotecsn:5f9a3735000000000000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [70671] [28/Oct/2020:23:30:13.379670553 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Reread remotecsn:5f9a373e000000000000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [70786] [28/Oct/2020:23:30:13.384542015 -0400] - DEBUG - csngen_adjust_time - gen state before 5f9a37450001:1603942213:0:0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [70967] [28/Oct/2020:23:30:13.388001596 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-136-251:39201)): Consumer RUV: DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [71123] [28/Oct/2020:23:30:13.390975575 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replicageneration} 5f9a36f8000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [71336] 
[28/Oct/2020:23:30:13.393549706 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replica 1 ldap://localhost.localdomain:39001} 5f9a36f8000100010000 5f9a373e000000010000 00000000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [71517] [28/Oct/2020:23:30:13.396387179 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-136-251:39201)): Supplier RUV: DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [71673] [28/Oct/2020:23:30:13.399144466 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replicageneration} 5f9a36f8000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [71886] [28/Oct/2020:23:30:13.402018880 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replica 1 ldap://localhost.localdomain:39001} 5f9a36f8000100010000 5f9a373e000000010000 5f9a373e DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [72182] [28/Oct/2020:23:30:13.404605359 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_initial_anchorcsn - agmt="cn=201" (ci-vm-10-0-136-251:39201) - (cscb 0 - state 1) - csnPrevMax () csnMax (5f9a373e000000010000) csnBuf (00000000000000000000) csnConsumerMax (5f9a373e000000010000) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [72306] [28/Oct/2020:23:30:13.407052314 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_load_buffer - rc=-30988 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [72578] [28/Oct/2020:23:30:13.410002308 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_return_buffer - session end: state=5 load=0 sent=0 skipped=0 
skipped_new_rid=0 skipped_csn_gt_cons_maxcsn=0 skipped_up_to_date=0 skipped_csn_gt_ruv=0 skipped_csn_covered=0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [72727] [28/Oct/2020:23:30:13.412601499 -0400] - DEBUG - NSMMReplicationPlugin - send_updates - agmt="cn=201" (ci-vm-10-0-136-251:39201): No changes to send DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [72891] [28/Oct/2020:23:30:13.418810390 -0400] - DEBUG - NSMMReplicationPlugin - release_replica - agmt="cn=201" (ci-vm-10-0-136-251:39201): Successfully released consumer DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [73061] [28/Oct/2020:23:30:13.421959123 -0400] - DEBUG - NSMMReplicationPlugin - conn_start_linger -agmt="cn=201" (ci-vm-10-0-136-251:39201) - Beginning linger on the connection DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [73235] [28/Oct/2020:23:30:13.424714491 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: sending_updates -> wait_for_changes DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [73357] [28/Oct/2020:23:30:15.130783476 -0400] - DEBUG - _csngen_adjust_local_time - gen state before 5f9a37450001:1603942213:0:0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [73478] [28/Oct/2020:23:30:15.134956507 -0400] - DEBUG - _csngen_adjust_local_time - gen state after 5f9a37470000:1603942215:0:0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [73641] [28/Oct/2020:23:30:15.143029923 -0400] - DEBUG - NSMMReplicationPlugin - ruv_add_csn_inprogress - Successfully inserted csn 5f9a3747000000010000 into pending list DEBUG  
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [73821] [28/Oct/2020:23:30:15.146184350 -0400] - DEBUG - NSMMReplicationPlugin - purge_entry_state_information - From entry cn=test_entry,dc=example,dc=com up to CSN 5f90fcbe000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [74066] [28/Oct/2020:23:30:15.149569521 -0400] - DEBUG - NSMMReplicationPlugin - write_changelog_and_ruv - Writing change for cn=test_entry,dc=example,dc=com (uniqid: e2746602-199611eb-bbbd91ac-3101a0f1, optype: 8) to changelog csn 5f9a3747000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [74241] [28/Oct/2020:23:30:15.152392886 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - cl5WriteOperationTxn - Successfully written entry with csn (5f9a3747000000010000) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [74379] [28/Oct/2020:23:30:15.155267924 -0400] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: committing all csns for csn 5f9a3747000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [74509] [28/Oct/2020:23:30:15.158032709 -0400] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: processing data csn 5f9a3747000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [74647] [28/Oct/2020:23:30:15.286339049 -0400] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Successfully committed csn 5f9a3747000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [74775] [28/Oct/2020:23:30:15.292616195 -0400] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Rolled up to csn 5f9a3747000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [74890] 
[28/Oct/2020:23:30:15.295814377 -0400] - DEBUG - replication - multimaster_mmr_postop - error 0 for operation 561. DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [75065] [28/Oct/2020:23:30:15.313421577 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: wait_for_changes -> wait_for_changes DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [75248] [28/Oct/2020:23:30:15.316600657 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: wait_for_changes -> ready_to_acquire_replica DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [75420] [28/Oct/2020:23:30:15.319332900 -0400] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-136-251:39201) - Canceling linger on the connection DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [75588] [28/Oct/2020:23:30:15.329242462 -0400] - DEBUG - NSMMReplicationPlugin - acquire_replica - agmt="cn=201" (ci-vm-10-0-136-251:39201): Replica was successfully acquired. 
DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [75770] [28/Oct/2020:23:30:15.331916618 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: ready_to_acquire_replica -> sending_updates DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [75885] [28/Oct/2020:23:30:15.334461021 -0400] - DEBUG - csngen_adjust_time - gen state before 5f9a37470002:1603942215:0:0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [76066] [28/Oct/2020:23:30:15.336832940 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-136-251:39201)): Consumer RUV: DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [76222] [28/Oct/2020:23:30:15.339653383 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replicageneration} 5f9a36f8000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [76435] [28/Oct/2020:23:30:15.342657966 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replica 1 ldap://localhost.localdomain:39001} 5f9a36f8000100010000 5f9a373e000000010000 00000000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [76616] [28/Oct/2020:23:30:15.345375619 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-136-251:39201)): Supplier RUV: DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [76772] [28/Oct/2020:23:30:15.348578388 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replicageneration} 5f9a36f8000000010000 DEBUG  
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [76985] [28/Oct/2020:23:30:15.351660018 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replica 1 ldap://localhost.localdomain:39001} 5f9a36f8000100010000 5f9a3747000000010000 5f9a3747 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [77147] [28/Oct/2020:23:30:15.354391115 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_get_buffer - found thread private buffer cache 0x7ff56003b500 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [77374] [28/Oct/2020:23:30:15.357149166 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_get_buffer - _pool is 0x7ff58e23f2e0 _pool->pl_busy_lists is 0x7ff560f05ae0 _pool->pl_busy_lists->bl_buffers is 0x7ff56003b500 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [77670] [28/Oct/2020:23:30:15.360653155 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_initial_anchorcsn - agmt="cn=201" (ci-vm-10-0-136-251:39201) - (cscb 0 - state 0) - csnPrevMax () csnMax (5f9a3747000000010000) csnBuf (00000000000000000000) csnConsumerMax (5f9a373e000000010000) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [77783] [28/Oct/2020:23:30:15.363365755 -0400] - DEBUG - clcache_initial_anchorcsn - anchor is now: 5f9a373e000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [77974] [28/Oct/2020:23:30:15.365810858 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - agmt="cn=201" (ci-vm-10-0-136-251:39201): CSN 5f9a373e000000010000 found, position set for replay DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [78130] [28/Oct/2020:23:30:15.368799631 -0400] - DEBUG - 
agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_get_next_change - load=1 rec=1 csn=5f9a3747000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [78242] [28/Oct/2020:23:30:15.371411705 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Starting DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [78374] [28/Oct/2020:23:30:15.381656390 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [78594] [28/Oct/2020:23:30:15.384737117 -0400] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-136-251:39201): Sending modify operation (dn="cn=test_entry,dc=example,dc=com" csn=5f9a3747000000010000) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [78726] [28/Oct/2020:23:30:15.387194670 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [78924] [28/Oct/2020:23:30:15.391098953 -0400] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-136-251:39201): Consumer successfully sent operation with csn 5f9a3747000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [79239] [28/Oct/2020:23:30:15.402910035 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_adjust_anchorcsn - agmt="cn=201" (ci-vm-10-0-136-251:39201) - (cscb 0 - state 1) - csnPrevMax (5f9a3747000000010000) csnMax (5f9a3747000000010000) csnBuf (5f9a3747000000010000) csnConsumerMax (5f9a3747000000010000) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [79363] 
[28/Oct/2020:23:30:15.406096891 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_load_buffer - rc=-30988 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [79547] [28/Oct/2020:23:30:15.408952589 -0400] - DEBUG - NSMMReplicationPlugin - send_updates - agmt="cn=201" (ci-vm-10-0-136-251:39201): No more updates to send (cl5GetNextOperationToReplay) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [79659] [28/Oct/2020:23:30:15.412269795 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 0 10 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [79791] [28/Oct/2020:23:30:15.421237544 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [79924] [28/Oct/2020:23:30:15.432238382 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 10 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [80054] [28/Oct/2020:23:30:15.435573880 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Result 3, 0, 0, 10, (null) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [80187] [28/Oct/2020:23:30:15.439420369 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 10 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [80320] [28/Oct/2020:23:30:15.443281164 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 10 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [80453] [28/Oct/2020:23:30:15.448173631 -0400] - DEBUG - 
NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 10 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [80586] [28/Oct/2020:23:30:15.455658586 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 10 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [80719] [28/Oct/2020:23:30:15.468166607 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 10 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [80852] [28/Oct/2020:23:30:15.487366255 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 10 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [80965] [28/Oct/2020:23:30:15.521682870 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 10 10 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [81074] [28/Oct/2020:23:30:15.524366991 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain exiting DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [81346] [28/Oct/2020:23:30:15.527407420 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_return_buffer - session end: state=5 load=1 sent=1 skipped=0 skipped_new_rid=0 skipped_csn_gt_cons_maxcsn=0 skipped_up_to_date=0 skipped_csn_gt_ruv=0 skipped_csn_covered=0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [81510] [28/Oct/2020:23:30:15.533381330 -0400] - DEBUG - NSMMReplicationPlugin - release_replica - agmt="cn=201" (ci-vm-10-0-136-251:39201): Successfully released consumer DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: 
[81680] [28/Oct/2020:23:30:15.536092434 -0400] - DEBUG - NSMMReplicationPlugin - conn_start_linger -agmt="cn=201" (ci-vm-10-0-136-251:39201) - Beginning linger on the connection DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [81854] [28/Oct/2020:23:30:15.538686776 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: sending_updates -> wait_for_changes DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [81854] DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:67 _pattern_errorlog: end at offset 81854
Passed suites/schema/schema_replication_test.py::test_schema_replication_eight 12.04
-------------------------------Captured log call--------------------------------
INFO  lib389:schema_replication_test.py:41 ############################################### INFO  lib389:schema_replication_test.py:42 ####### INFO  lib389:schema_replication_test.py:43 ####### Same OC - extra MAY: Schema is pushed (fix for 47721) INFO  lib389:schema_replication_test.py:44 ####### INFO  lib389:schema_replication_test.py:45 ################################################### DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive b'14' (expected 15) INFO  lib389:agreement.py:1194 Pausing replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive b'15' (expected 16) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:625 test_schema_replication_eight master_schema_csn=b'5f9a374b000000000000' DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:626 ctest_schema_replication_eight onsumer_schema_csn=b'5f9a374b000000000000' DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:55 _pattern_errorlog: start at offset 81855 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [81962] 28/Oct/2020:23:30:18.886941152 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [82070] [28/Oct/2020:23:30:19.197276191 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. 
DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [82192] [28/Oct/2020:23:30:19.211602122 -0400] - DEBUG - _csngen_adjust_local_time - gen state before 5f9a37470002:1603942215:0:0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [82313] [28/Oct/2020:23:30:19.216352699 -0400] - DEBUG - _csngen_adjust_local_time - gen state after 5f9a374b0000:1603942219:0:0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [82476] [28/Oct/2020:23:30:19.219161213 -0400] - DEBUG - NSMMReplicationPlugin - ruv_add_csn_inprogress - Successfully inserted csn 5f9a374b000000010000 into pending list DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [82656] [28/Oct/2020:23:30:19.226866239 -0400] - DEBUG - NSMMReplicationPlugin - purge_entry_state_information - From entry cn=test_entry,dc=example,dc=com up to CSN 5f90fcc7000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [82901] [28/Oct/2020:23:30:19.230146801 -0400] - DEBUG - NSMMReplicationPlugin - write_changelog_and_ruv - Writing change for cn=test_entry,dc=example,dc=com (uniqid: e2746602-199611eb-bbbd91ac-3101a0f1, optype: 8) to changelog csn 5f9a374b000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [83076] [28/Oct/2020:23:30:19.232956832 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - cl5WriteOperationTxn - Successfully written entry with csn (5f9a374b000000010000) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [83214] [28/Oct/2020:23:30:19.235716488 -0400] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: committing all csns for csn 5f9a374b000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: 
[83344] [28/Oct/2020:23:30:19.238382798 -0400] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: processing data csn 5f9a374b000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [83482] [28/Oct/2020:23:30:19.241329109 -0400] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Successfully committed csn 5f9a374b000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [83610] [28/Oct/2020:23:30:19.244164295 -0400] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Rolled up to csn 5f9a374b000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [83725] [28/Oct/2020:23:30:19.247286254 -0400] - DEBUG - replication - multimaster_mmr_postop - error 0 for operation 561. DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [83900] [28/Oct/2020:23:30:19.252236837 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: wait_for_changes -> wait_for_changes DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [84083] [28/Oct/2020:23:30:19.255196817 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: wait_for_changes -> ready_to_acquire_replica DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [84255] [28/Oct/2020:23:30:19.258137650 -0400] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-136-251:39201) - Canceling linger on the connection DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [84423] [28/Oct/2020:23:30:19.262479043 -0400] - DEBUG - NSMMReplicationPlugin - acquire_replica - agmt="cn=201" (ci-vm-10-0-136-251:39201): Replica was successfully acquired. 
DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [84605] [28/Oct/2020:23:30:19.265185531 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: ready_to_acquire_replica -> sending_updates DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [84789] [28/Oct/2020:23:30:19.268231963 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Checking consumer schema localcsn:5f9a374b000000000000 / remotecsn:5f9a373e000000000000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [84952] [28/Oct/2020:23:30:19.408412746 -0400] - DEBUG - schema_oc_compare_strict - Attribute postOfficeBox is not allowed in 'masterNewOCC' of the remote consumer schema DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [85117] [28/Oct/2020:23:30:19.411914689 -0400] - DEBUG - schema_oc_compare_strict - Attribute postOfficeBox is not allowed in 'consumerNewOCA' of the remote consumer schema DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [85271] [28/Oct/2020:23:30:19.414956429 -0400] - DEBUG - schema_oc_superset_check - Remote consumerNewOCA schema objectclasses is a superset of the received one. 
DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [85434] [28/Oct/2020:23:30:19.424842066 -0400] - DEBUG - schema_oc_compare_strict - Attribute postOfficeBox is not allowed in 'masterNewOCC' of the remote consumer schema DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [85599] [28/Oct/2020:23:30:19.442426453 -0400] - DEBUG - schema_oc_compare_strict - Attribute postOfficeBox is not allowed in 'consumerNewOCA' of the remote consumer schema DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [85749] [28/Oct/2020:23:30:19.448883351 -0400] - DEBUG - schema_list_oc2learn - Add that unknown/extended objectclass consumerNewOCA (1.2.3.4.5.6.7.8.9.10.1) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [86049] [28/Oct/2020:23:30:19.451684614 -0400] - DEBUG - schema_oc_to_string - Replace (old[265]=( 1.2.3.4.5.6.7.8.9.10.1 NAME 'consumerNewOCA' DESC 'To test ticket 47490' SUP 'person' AUXILIARY MUST ( postalAddress $ preferredLocale $ telexNumber ) MAY ( postalCode $ street ) X-ORIGIN 'blahblahblah' )) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [86378] [28/Oct/2020:23:30:19.455255712 -0400] - DEBUG - supplier_get_new_definitions - supplier takes objectclass: ( 1.2.3.4.5.6.7.8.9.10.1 NAME 'consumerNewOCA' DESC 'To test ticket 47490' SUP person AUXILIARY MUST ( postalAddress $ preferredLocale $ telexNumber ) MAY ( postalCode $ street $ postOfficeBox ) X-ORIGIN 'user defined' ) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [86694] [28/Oct/2020:23:30:19.470983004 -0400] - DEBUG - modify_schema_prepare_mods - MOD[1] del (objectclasses): ( 1.2.3.4.5.6.7.8.9.10.1 NAME 'consumerNewOCA' DESC 'To test ticket 47490' SUP 'person' AUXILIARY MUST ( postalAddress $ preferredLocale $ 
telexNumber ) MAY ( postalCode $ street ) X-ORIGIN 'blahblahblah' ) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [87021] [28/Oct/2020:23:30:19.474972554 -0400] - DEBUG - modify_schema_prepare_mods - MOD[0] add (objectclasses): ( 1.2.3.4.5.6.7.8.9.10.1 NAME 'consumerNewOCA' DESC 'To test ticket 47490' SUP person AUXILIARY MUST ( postalAddress $ preferredLocale $ telexNumber ) MAY ( postalCode $ street $ postOfficeBox ) X-ORIGIN 'user defined' ) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [87129] [28/Oct/2020:23:30:19.477891054 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [87252] [28/Oct/2020:23:30:19.482848699 -0400] - DEBUG - modify_schema_internal_mod - Successfully learn objectclasses definitions DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [87466] [28/Oct/2020:23:30:19.486264545 -0400] - ERR - NSMMReplicationPlugin - update_consumer_schema - [S] Schema agmt="cn=201" (ci-vm-10-0-136-251:39201) must not be overwritten (set replication log for additional info) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:67 _pattern_errorlog: end at offset 87466
Passed suites/schema/schema_replication_test.py::test_schema_replication_nine 9.59
-------------------------------Captured log call--------------------------------
INFO  lib389:schema_replication_test.py:41 ############################################### INFO  lib389:schema_replication_test.py:42 ####### INFO  lib389:schema_replication_test.py:43 ####### Same OC - extra MAY: Schema is pushed - no error INFO  lib389:schema_replication_test.py:44 ####### INFO  lib389:schema_replication_test.py:45 ################################################### DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive b'16' (expected 17) INFO  lib389:agreement.py:1194 Pausing replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive b'17' (expected 18) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:683 test_schema_replication_nine master_schema_csn=b'5f9a3754000000000000' DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:684 ctest_schema_replication_nine onsumer_schema_csn=b'5f9a3754000000000000' DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:55 _pattern_errorlog: start at offset 87467 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [87603] 28/Oct/2020:23:30:19.489321065 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] schema definitions may have been learned DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [87766] [28/Oct/2020:23:30:19.634321141 -0400] - DEBUG - schema_oc_compare_strict - Attribute postOfficeBox is not allowed in 'masterNewOCC' of the remote consumer schema DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [87900] [28/Oct/2020:23:30:19.714206250 -0400] - DEBUG - 
NSMMReplicationPlugin - conn_push_schema - [S] Reread remotecsn:5f9a3748000000000000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [88085] [28/Oct/2020:23:30:19.717169485 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - Schema checking successful: ok to push the schema (agmt="cn=201" (ci-vm-10-0-136-251:39201)) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [88200] [28/Oct/2020:23:30:19.913503146 -0400] - DEBUG - csngen_adjust_time - gen state before 5f9a374b0002:1603942219:0:0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [88381] [28/Oct/2020:23:30:19.919250927 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-136-251:39201)): Consumer RUV: DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [88537] [28/Oct/2020:23:30:19.923104650 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replicageneration} 5f9a36f8000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [88750] [28/Oct/2020:23:30:19.926842156 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replica 1 ldap://localhost.localdomain:39001} 5f9a36f8000100010000 5f9a3747000000010000 00000000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [88931] [28/Oct/2020:23:30:19.933661507 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-136-251:39201)): Supplier RUV: DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [89087] [28/Oct/2020:23:30:19.942680480 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): 
{replicageneration} 5f9a36f8000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [89300] [28/Oct/2020:23:30:19.946108926 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replica 1 ldap://localhost.localdomain:39001} 5f9a36f8000100010000 5f9a374b000000010000 5f9a374b DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [89462] [28/Oct/2020:23:30:19.953132960 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_get_buffer - found thread private buffer cache 0x7ff56003b500 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [89689] [28/Oct/2020:23:30:19.960488655 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_get_buffer - _pool is 0x7ff58e23f2e0 _pool->pl_busy_lists is 0x7ff560f05ae0 _pool->pl_busy_lists->bl_buffers is 0x7ff56003b500 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [89985] [28/Oct/2020:23:30:19.965187889 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_initial_anchorcsn - agmt="cn=201" (ci-vm-10-0-136-251:39201) - (cscb 0 - state 0) - csnPrevMax () csnMax (5f9a374b000000010000) csnBuf (5f9a3747000000010000) csnConsumerMax (5f9a3747000000010000) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [90098] [28/Oct/2020:23:30:19.970377089 -0400] - DEBUG - clcache_initial_anchorcsn - anchor is now: 5f9a3747000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [90289] [28/Oct/2020:23:30:19.974681854 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - agmt="cn=201" (ci-vm-10-0-136-251:39201): CSN 5f9a3747000000010000 found, position set for replay DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [90445] 
[28/Oct/2020:23:30:19.982097863 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_get_next_change - load=1 rec=1 csn=5f9a374b000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [90557] [28/Oct/2020:23:30:19.986088481 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Starting DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [90689] [28/Oct/2020:23:30:19.989425210 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [90909] [28/Oct/2020:23:30:19.994322852 -0400] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-136-251:39201): Sending modify operation (dn="cn=test_entry,dc=example,dc=com" csn=5f9a374b000000010000) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [91041] [28/Oct/2020:23:30:19.998389266 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [91239] [28/Oct/2020:23:30:20.004629300 -0400] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-136-251:39201): Consumer successfully sent operation with csn 5f9a374b000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [91372] [28/Oct/2020:23:30:20.008126643 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 19 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [91502] [28/Oct/2020:23:30:20.012311012 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Result 3, 0, 0, 19, (null) DEBUG  
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [91635] [28/Oct/2020:23:30:20.015085295 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 19 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [91950] [28/Oct/2020:23:30:20.017596387 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_adjust_anchorcsn - agmt="cn=201" (ci-vm-10-0-136-251:39201) - (cscb 0 - state 1) - csnPrevMax (5f9a374b000000010000) csnMax (5f9a374b000000010000) csnBuf (5f9a374b000000010000) csnConsumerMax (5f9a374b000000010000) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [92074] [28/Oct/2020:23:30:20.020116035 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_load_buffer - rc=-30988 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [92258] [28/Oct/2020:23:30:20.022764084 -0400] - DEBUG - NSMMReplicationPlugin - send_updates - agmt="cn=201" (ci-vm-10-0-136-251:39201): No more updates to send (cl5GetNextOperationToReplay) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [92371] [28/Oct/2020:23:30:20.025743748 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 19 19 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [92504] [28/Oct/2020:23:30:20.028485896 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 19 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [92613] [28/Oct/2020:23:30:20.033428282 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain exiting DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [92885] [28/Oct/2020:23:30:20.036879003 
-0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_return_buffer - session end: state=5 load=1 sent=1 skipped=0 skipped_new_rid=0 skipped_csn_gt_cons_maxcsn=0 skipped_up_to_date=0 skipped_csn_gt_ruv=0 skipped_csn_covered=0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [93049] [28/Oct/2020:23:30:20.043240127 -0400] - DEBUG - NSMMReplicationPlugin - release_replica - agmt="cn=201" (ci-vm-10-0-136-251:39201): Successfully released consumer DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [93219] [28/Oct/2020:23:30:20.046494473 -0400] - DEBUG - NSMMReplicationPlugin - conn_start_linger -agmt="cn=201" (ci-vm-10-0-136-251:39201) - Beginning linger on the connection DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [93393] [28/Oct/2020:23:30:20.049762679 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: sending_updates -> wait_for_changes DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [93501] [28/Oct/2020:23:30:20.257150085 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. 
DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [93672] [28/Oct/2020:23:30:20.262103740 -0400] - DEBUG - NSMMReplicationPlugin - agmt_set_enabled_from_entry: agreement is now disabled (agmt="cn=201" (ci-vm-10-0-136-251:39201)) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [93847] [28/Oct/2020:23:30:20.265250591 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: wait_for_changes -> wait_for_changes DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [94022] [28/Oct/2020:23:30:20.273284786 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: wait_for_changes -> wait_for_changes DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [94194] [28/Oct/2020:23:30:20.276968674 -0400] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-136-251:39201) - Canceling linger on the connection DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [94369] [28/Oct/2020:23:30:20.280276230 -0400] - DEBUG - NSMMReplicationPlugin - close_connection_internal - agmt="cn=201" (ci-vm-10-0-136-251:39201) - Disconnected from the consumer DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [94534] [28/Oct/2020:23:30:20.365478532 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_stop - agmt="cn=201" (ci-vm-10-0-136-251:39201): Protocol stopped after 0 seconds DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [94662] [28/Oct/2020:23:30:20.371926185 -0400] - DEBUG - NSMMReplicationPlugin - Database RUV: {replicageneration} 5f9a36f8000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 
_pattern_errorlog: [94847] [28/Oct/2020:23:30:20.374842025 -0400] - DEBUG - NSMMReplicationPlugin - Database RUV: {replica 1 ldap://localhost.localdomain:39001} 5f9a36f8000100010000 5f9a374b000000010000 5f9a374b DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [95022] [28/Oct/2020:23:30:20.379631328 -0400] - DEBUG - NSMMReplicationPlugin - close_connection_internal - agmt="cn=201" (ci-vm-10-0-136-251:39201) - Disconnected from the consumer DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [95130] [28/Oct/2020:23:30:20.382543944 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [95238] [28/Oct/2020:23:30:25.401099272 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [95408] [28/Oct/2020:23:30:25.404991797 -0400] - DEBUG - NSMMReplicationPlugin - agmt_set_enabled_from_entry: agreement is now enabled (agmt="cn=201" (ci-vm-10-0-136-251:39201)) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [95583] [28/Oct/2020:23:30:25.408487547 -0400] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-136-251:39201) - No linger to cancel on the connection DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [95758] [28/Oct/2020:23:30:25.411757646 -0400] - DEBUG - NSMMReplicationPlugin - close_connection_internal - agmt="cn=201" (ci-vm-10-0-136-251:39201) - Disconnected from the consumer DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [95930] [28/Oct/2020:23:30:25.414772142 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" 
(ci-vm-10-0-136-251:39201): State: start -> ready_to_acquire_replica DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [96099] [28/Oct/2020:23:30:25.419470175 -0400] - DEBUG - NSMMReplicationPlugin - conn_connect - agmt="cn=201" (ci-vm-10-0-136-251:39201) - Trying non-secure slapi_ldap_init_ext DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [96627] [28/Oct/2020:23:30:25.425921951 -0400] - DEBUG - NSMMReplicationPlugin - conn_connect - agmt="cn=201" (ci-vm-10-0-136-251:39201) - binddn = cn=ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701,ou=services,dc=example,dc=com, passwd = {AES-TUhNR0NTcUdTSWIzRFFFRkRUQm1NRVVHQ1NxR1NJYjNEUUVGRERBNEJDUmtaVFV5WlRsbVl5MWtORGhrTTJJdw0KTlMxaFl6VTNNamcwT1Mxak1XVm1aVEEyTlFBQ0FRSUNBU0F3Q2dZSUtvWklodmNOQWdjd0hRWUpZSVpJQVdVRA0KQkFFcUJCRDhpTkdqOTI2RVV0eS9mSGtIMW1INw==}1aP0CGo7VTyXKYAH9mQPJxPbmZao4qtbDnejHuqj6hueREUbjTmpIFJ9v+wKLJM0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [96672] hwtXYLylnARfBAm0SlP5kfwhZ0UpujEuyyXSDnqmiaE= DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [96847] [28/Oct/2020:23:30:25.434125014 -0400] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-136-251:39201) - No linger to cancel on the connection DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [96969] [28/Oct/2020:23:30:25.439816627 -0400] - DEBUG - _csngen_adjust_local_time - gen state before 5f9a374b0002:1603942219:0:0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [97090] [28/Oct/2020:23:30:25.442765950 -0400] - DEBUG - _csngen_adjust_local_time - gen state after 5f9a37510000:1603942225:0:0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [97258] 
[28/Oct/2020:23:30:25.446706863 -0400] - DEBUG - NSMMReplicationPlugin - acquire_replica - agmt="cn=201" (ci-vm-10-0-136-251:39201): Replica was successfully acquired. DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [97440] [28/Oct/2020:23:30:25.449377012 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: ready_to_acquire_replica -> sending_updates DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [97624] [28/Oct/2020:23:30:25.452380957 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Checking consumer schema localcsn:5f9a374b000000000000 / remotecsn:5f9a3748000000000000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [97758] [28/Oct/2020:23:30:25.731454670 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Reread remotecsn:5f9a374b000000000000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [97873] [28/Oct/2020:23:30:25.734620177 -0400] - DEBUG - csngen_adjust_time - gen state before 5f9a37510001:1603942225:0:0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [98054] [28/Oct/2020:23:30:25.737375274 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-136-251:39201)): Consumer RUV: DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [98210] [28/Oct/2020:23:30:25.740103747 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replicageneration} 5f9a36f8000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [98423] [28/Oct/2020:23:30:25.743012096 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replica 
1 ldap://localhost.localdomain:39001} 5f9a36f8000100010000 5f9a374b000000010000 00000000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [98604] [28/Oct/2020:23:30:25.745830472 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-136-251:39201)): Supplier RUV: DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [98760] [28/Oct/2020:23:30:25.750028069 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replicageneration} 5f9a36f8000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [98973] [28/Oct/2020:23:30:25.753788573 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replica 1 ldap://localhost.localdomain:39001} 5f9a36f8000100010000 5f9a374b000000010000 5f9a374b DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [99269] [28/Oct/2020:23:30:25.756847565 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_initial_anchorcsn - agmt="cn=201" (ci-vm-10-0-136-251:39201) - (cscb 0 - state 1) - csnPrevMax () csnMax (5f9a374b000000010000) csnBuf (00000000000000000000) csnConsumerMax (5f9a374b000000010000) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [99393] [28/Oct/2020:23:30:25.764225719 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_load_buffer - rc=-30988 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [99665] [28/Oct/2020:23:30:25.767319128 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_return_buffer - session end: state=5 load=0 sent=0 skipped=0 skipped_new_rid=0 skipped_csn_gt_cons_maxcsn=0 skipped_up_to_date=0 skipped_csn_gt_ruv=0 skipped_csn_covered=0 DEBUG  
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [99814] [28/Oct/2020:23:30:25.770129808 -0400] - DEBUG - NSMMReplicationPlugin - send_updates - agmt="cn=201" (ci-vm-10-0-136-251:39201): No changes to send DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [99978] [28/Oct/2020:23:30:25.776580341 -0400] - DEBUG - NSMMReplicationPlugin - release_replica - agmt="cn=201" (ci-vm-10-0-136-251:39201): Successfully released consumer DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [100148] [28/Oct/2020:23:30:25.779715546 -0400] - DEBUG - NSMMReplicationPlugin - conn_start_linger -agmt="cn=201" (ci-vm-10-0-136-251:39201) - Beginning linger on the connection DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [100322] [28/Oct/2020:23:30:25.782371564 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: sending_updates -> wait_for_changes DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [100444] [28/Oct/2020:23:30:27.416678056 -0400] - DEBUG - _csngen_adjust_local_time - gen state before 5f9a37510001:1603942225:0:0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [100565] [28/Oct/2020:23:30:27.424629069 -0400] - DEBUG - _csngen_adjust_local_time - gen state after 5f9a37530000:1603942227:0:0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [100728] [28/Oct/2020:23:30:27.429983987 -0400] - DEBUG - NSMMReplicationPlugin - ruv_add_csn_inprogress - Successfully inserted csn 5f9a3753000000010000 into pending list DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [100908] [28/Oct/2020:23:30:27.433691391 -0400] - DEBUG - NSMMReplicationPlugin - 
purge_entry_state_information - From entry cn=test_entry,dc=example,dc=com up to CSN 5f90fccb000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [101153] [28/Oct/2020:23:30:27.439212022 -0400] - DEBUG - NSMMReplicationPlugin - write_changelog_and_ruv - Writing change for cn=test_entry,dc=example,dc=com (uniqid: e2746602-199611eb-bbbd91ac-3101a0f1, optype: 8) to changelog csn 5f9a3753000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [101328] [28/Oct/2020:23:30:27.442258667 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - cl5WriteOperationTxn - Successfully written entry with csn (5f9a3753000000010000) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [101466] [28/Oct/2020:23:30:27.445387659 -0400] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: committing all csns for csn 5f9a3753000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [101596] [28/Oct/2020:23:30:27.448569334 -0400] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: processing data csn 5f9a3753000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [101734] [28/Oct/2020:23:30:27.451541403 -0400] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Successfully committed csn 5f9a3753000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [101862] [28/Oct/2020:23:30:27.454563245 -0400] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Rolled up to csn 5f9a3753000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [101977] [28/Oct/2020:23:30:27.460000785 -0400] - DEBUG - replication - multimaster_mmr_postop - error 0 for operation 561. 
DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [102152] [28/Oct/2020:23:30:27.465981512 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: wait_for_changes -> wait_for_changes DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [102335] [28/Oct/2020:23:30:27.469279034 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: wait_for_changes -> ready_to_acquire_replica DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [102507] [28/Oct/2020:23:30:27.472763368 -0400] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-136-251:39201) - Canceling linger on the connection DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [102675] [28/Oct/2020:23:30:27.477409784 -0400] - DEBUG - NSMMReplicationPlugin - acquire_replica - agmt="cn=201" (ci-vm-10-0-136-251:39201): Replica was successfully acquired. 
DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [102857] [28/Oct/2020:23:30:27.480540884 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: ready_to_acquire_replica -> sending_updates DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [102972] [28/Oct/2020:23:30:27.483830165 -0400] - DEBUG - csngen_adjust_time - gen state before 5f9a37530002:1603942227:0:0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [103153] [28/Oct/2020:23:30:27.488342902 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-136-251:39201)): Consumer RUV: DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [103309] [28/Oct/2020:23:30:27.491521703 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replicageneration} 5f9a36f8000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [103522] [28/Oct/2020:23:30:27.494520961 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replica 1 ldap://localhost.localdomain:39001} 5f9a36f8000100010000 5f9a374b000000010000 00000000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [103703] [28/Oct/2020:23:30:27.498291325 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-136-251:39201)): Supplier RUV: DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [103859] [28/Oct/2020:23:30:27.501291249 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replicageneration} 5f9a36f8000000010000 DEBUG  
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [104072] [28/Oct/2020:23:30:27.504768973 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replica 1 ldap://localhost.localdomain:39001} 5f9a36f8000100010000 5f9a3753000000010000 5f9a3753 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [104234] [28/Oct/2020:23:30:27.510145721 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_get_buffer - found thread private buffer cache 0x7ff56003b600 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [104461] [28/Oct/2020:23:30:27.513383543 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_get_buffer - _pool is 0x7ff58e23f2e0 _pool->pl_busy_lists is 0x7ff560f05ae0 _pool->pl_busy_lists->bl_buffers is 0x7ff56003b600 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [104757] [28/Oct/2020:23:30:27.517861008 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_initial_anchorcsn - agmt="cn=201" (ci-vm-10-0-136-251:39201) - (cscb 0 - state 0) - csnPrevMax () csnMax (5f9a3753000000010000) csnBuf (00000000000000000000) csnConsumerMax (5f9a374b000000010000) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [104870] [28/Oct/2020:23:30:27.521146951 -0400] - DEBUG - clcache_initial_anchorcsn - anchor is now: 5f9a374b000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [105061] [28/Oct/2020:23:30:27.524409064 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - agmt="cn=201" (ci-vm-10-0-136-251:39201): CSN 5f9a374b000000010000 found, position set for replay DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [105217] [28/Oct/2020:23:30:27.527154404 -0400] - 
DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_get_next_change - load=1 rec=1 csn=5f9a3753000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [105329] [28/Oct/2020:23:30:27.530133928 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Starting DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [105549] [28/Oct/2020:23:30:27.533515377 -0400] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-136-251:39201): Sending modify operation (dn="cn=test_entry,dc=example,dc=com" csn=5f9a3753000000010000) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [105681] [28/Oct/2020:23:30:27.536643845 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [105879] [28/Oct/2020:23:30:27.539541606 -0400] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-136-251:39201): Consumer successfully sent operation with csn 5f9a3753000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [106011] [28/Oct/2020:23:30:27.544703283 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [106326] [28/Oct/2020:23:30:27.548224088 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_adjust_anchorcsn - agmt="cn=201" (ci-vm-10-0-136-251:39201) - (cscb 0 - state 1) - csnPrevMax (5f9a3753000000010000) csnMax (5f9a3753000000010000) csnBuf (5f9a3753000000010000) csnConsumerMax (5f9a3753000000010000) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [106450] 
[28/Oct/2020:23:30:27.550989325 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_load_buffer - rc=-30988 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [106634] [28/Oct/2020:23:30:27.553941160 -0400] - DEBUG - NSMMReplicationPlugin - send_updates - agmt="cn=201" (ci-vm-10-0-136-251:39201): No more updates to send (cl5GetNextOperationToReplay) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [106746] [28/Oct/2020:23:30:27.557219409 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 0 10 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [106879] [28/Oct/2020:23:30:27.560298814 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 10 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [107009] [28/Oct/2020:23:30:27.563503307 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Result 3, 0, 0, 10, (null) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [107142] [28/Oct/2020:23:30:27.566081608 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 10 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [107275] [28/Oct/2020:23:30:27.570072553 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 10 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [107408] [28/Oct/2020:23:30:27.575108411 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 10 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [107541] [28/Oct/2020:23:30:27.582510121 -0400] - 
DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 10 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [107674] [28/Oct/2020:23:30:27.594241575 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 10 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [107807] [28/Oct/2020:23:30:27.614031740 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 10 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [107940] [28/Oct/2020:23:30:27.649737327 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 10 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [108053] [28/Oct/2020:23:30:27.660460529 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 10 10 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [108162] [28/Oct/2020:23:30:27.717828418 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain exiting DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [108434] [28/Oct/2020:23:30:27.722026769 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_return_buffer - session end: state=5 load=1 sent=1 skipped=0 skipped_new_rid=0 skipped_csn_gt_cons_maxcsn=0 skipped_up_to_date=0 skipped_csn_gt_ruv=0 skipped_csn_covered=0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [108598] [28/Oct/2020:23:30:27.729058487 -0400] - DEBUG - NSMMReplicationPlugin - release_replica - agmt="cn=201" (ci-vm-10-0-136-251:39201): Successfully released consumer DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 
_pattern_errorlog: [108768] [28/Oct/2020:23:30:27.733411819 -0400] - DEBUG - NSMMReplicationPlugin - conn_start_linger -agmt="cn=201" (ci-vm-10-0-136-251:39201) - Beginning linger on the connection DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [108942] [28/Oct/2020:23:30:27.738263646 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: sending_updates -> wait_for_changes DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [109050] [28/Oct/2020:23:30:28.738531540 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [109165] [28/Oct/2020:23:30:28.765188634 -0400] - DEBUG - replication - multimaster_mmr_postop - error 0 for operation 561. DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [109273] [28/Oct/2020:23:30:28.790678628 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [109381] [28/Oct/2020:23:30:28.854744055 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. 
DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [109503] [28/Oct/2020:23:30:28.872204212 -0400] - DEBUG - _csngen_adjust_local_time - gen state before 5f9a37530002:1603942227:0:0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [109624] [28/Oct/2020:23:30:28.875887598 -0400] - DEBUG - _csngen_adjust_local_time - gen state after 5f9a37540000:1603942228:0:0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [109787] [28/Oct/2020:23:30:28.879352420 -0400] - DEBUG - NSMMReplicationPlugin - ruv_add_csn_inprogress - Successfully inserted csn 5f9a3754000000010000 into pending list DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [109967] [28/Oct/2020:23:30:28.882632278 -0400] - DEBUG - NSMMReplicationPlugin - purge_entry_state_information - From entry cn=test_entry,dc=example,dc=com up to CSN 5f90fcd3000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [110212] [28/Oct/2020:23:30:28.886332920 -0400] - DEBUG - NSMMReplicationPlugin - write_changelog_and_ruv - Writing change for cn=test_entry,dc=example,dc=com (uniqid: e2746602-199611eb-bbbd91ac-3101a0f1, optype: 8) to changelog csn 5f9a3754000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [110387] [28/Oct/2020:23:30:28.889957675 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - cl5WriteOperationTxn - Successfully written entry with csn (5f9a3754000000010000) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [110525] [28/Oct/2020:23:30:28.892961143 -0400] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: committing all csns for csn 5f9a3754000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 
_pattern_errorlog: [110655] [28/Oct/2020:23:30:28.895706465 -0400] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: processing data csn 5f9a3754000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [110793] [28/Oct/2020:23:30:28.898429382 -0400] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Successfully committed csn 5f9a3754000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [110921] [28/Oct/2020:23:30:28.900988157 -0400] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Rolled up to csn 5f9a3754000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [111036] [28/Oct/2020:23:30:28.904053864 -0400] - DEBUG - replication - multimaster_mmr_postop - error 0 for operation 561. DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [111211] [28/Oct/2020:23:30:28.908520966 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: wait_for_changes -> wait_for_changes DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [111394] [28/Oct/2020:23:30:28.911581642 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: wait_for_changes -> ready_to_acquire_replica DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [111566] [28/Oct/2020:23:30:28.914430972 -0400] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-136-251:39201) - Canceling linger on the connection DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [111734] [28/Oct/2020:23:30:28.918583904 -0400] - DEBUG - NSMMReplicationPlugin - acquire_replica - agmt="cn=201" (ci-vm-10-0-136-251:39201): Replica was successfully acquired. 
DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [111916] [28/Oct/2020:23:30:28.921936996 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: ready_to_acquire_replica -> sending_updates DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [112100] [28/Oct/2020:23:30:28.925188053 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Checking consumer schema localcsn:5f9a3754000000000000 / remotecsn:5f9a374b000000000000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [112234] [28/Oct/2020:23:30:29.138069147 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Reread remotecsn:5f9a374b000000000000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [112419] [28/Oct/2020:23:30:29.141965966 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - Schema checking successful: ok to push the schema (agmt="cn=201" (ci-vm-10-0-136-251:39201)) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [112534] [28/Oct/2020:23:30:29.335835197 -0400] - DEBUG - csngen_adjust_time - gen state before 5f9a37540002:1603942228:0:0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [112656] [28/Oct/2020:23:30:29.368488854 -0400] - DEBUG - _csngen_adjust_local_time - gen state before 5f9a37540002:1603942228:0:0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [112777] [28/Oct/2020:23:30:29.386897469 -0400] - DEBUG - _csngen_adjust_local_time - gen state after 5f9a37550000:1603942229:0:0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [112958] [28/Oct/2020:23:30:29.390301483 -0400] - DEBUG - NSMMReplicationPlugin - changelog 
program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-136-251:39201)): Consumer RUV: DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [113114] [28/Oct/2020:23:30:29.393141922 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replicageneration} 5f9a36f8000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [113327] [28/Oct/2020:23:30:29.396384360 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replica 1 ldap://localhost.localdomain:39001} 5f9a36f8000100010000 5f9a3753000000010000 00000000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [113508] [28/Oct/2020:23:30:29.399770396 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-136-251:39201)): Supplier RUV: DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [113664] [28/Oct/2020:23:30:29.402911635 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replicageneration} 5f9a36f8000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [113877] [28/Oct/2020:23:30:29.405944812 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replica 1 ldap://localhost.localdomain:39001} 5f9a36f8000100010000 5f9a3754000000010000 5f9a3754 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [114039] [28/Oct/2020:23:30:29.408611561 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_get_buffer - found thread private buffer cache 0x7ff56003b600 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [114266] [28/Oct/2020:23:30:29.411704893 -0400] - DEBUG - 
agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_get_buffer - _pool is 0x7ff58e23f2e0 _pool->pl_busy_lists is 0x7ff560f05ae0 _pool->pl_busy_lists->bl_buffers is 0x7ff56003b600 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [114562] [28/Oct/2020:23:30:29.414890461 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_initial_anchorcsn - agmt="cn=201" (ci-vm-10-0-136-251:39201) - (cscb 0 - state 0) - csnPrevMax () csnMax (5f9a3754000000010000) csnBuf (5f9a3753000000010000) csnConsumerMax (5f9a3753000000010000) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [114675] [28/Oct/2020:23:30:29.417871163 -0400] - DEBUG - clcache_initial_anchorcsn - anchor is now: 5f9a3753000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [114866] [28/Oct/2020:23:30:29.420965954 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - agmt="cn=201" (ci-vm-10-0-136-251:39201): CSN 5f9a3753000000010000 found, position set for replay DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [115022] [28/Oct/2020:23:30:29.424433048 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_get_next_change - load=1 rec=1 csn=5f9a3754000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [115134] [28/Oct/2020:23:30:29.427873979 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Starting DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [115354] [28/Oct/2020:23:30:29.431001484 -0400] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-136-251:39201): Sending modify operation (dn="cn=test_entry,dc=example,dc=com" csn=5f9a3754000000010000) DEBUG  
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [115486] [28/Oct/2020:23:30:29.434490060 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [115684] [28/Oct/2020:23:30:29.437901313 -0400] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-136-251:39201): Consumer successfully sent operation with csn 5f9a3754000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [115999] [28/Oct/2020:23:30:29.440890537 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_adjust_anchorcsn - agmt="cn=201" (ci-vm-10-0-136-251:39201) - (cscb 0 - state 1) - csnPrevMax (5f9a3754000000010000) csnMax (5f9a3754000000010000) csnBuf (5f9a3754000000010000) csnConsumerMax (5f9a3754000000010000) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [116123] [28/Oct/2020:23:30:29.443828019 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_load_buffer - rc=-30988 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [116307] [28/Oct/2020:23:30:29.446611682 -0400] - DEBUG - NSMMReplicationPlugin - send_updates - agmt="cn=201" (ci-vm-10-0-136-251:39201): No more updates to send (cl5GetNextOperationToReplay) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [116419] [28/Oct/2020:23:30:29.450222014 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 0 17 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [116552] [28/Oct/2020:23:30:29.453705761 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 17 DEBUG  
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [116682] [28/Oct/2020:23:30:29.459603552 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Result 3, 0, 0, 17, (null) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [116815] [28/Oct/2020:23:30:29.463200113 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 17 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [116948] [28/Oct/2020:23:30:29.467721328 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 17 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [117081] [28/Oct/2020:23:30:29.472935418 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 17 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [117214] [28/Oct/2020:23:30:29.481214820 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 17 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [117347] [28/Oct/2020:23:30:29.492778935 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 17 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [117480] [28/Oct/2020:23:30:29.512814594 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 17 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [117613] [28/Oct/2020:23:30:29.548388282 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 17 DEBUG  
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [117726] [28/Oct/2020:23:30:29.553888471 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 17 17 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [117835] [28/Oct/2020:23:30:29.617036437 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain exiting DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [118107] [28/Oct/2020:23:30:29.620498368 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_return_buffer - session end: state=5 load=1 sent=1 skipped=0 skipped_new_rid=0 skipped_csn_gt_cons_maxcsn=0 skipped_up_to_date=0 skipped_csn_gt_ruv=0 skipped_csn_covered=0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [118271] [28/Oct/2020:23:30:29.627027274 -0400] - DEBUG - NSMMReplicationPlugin - release_replica - agmt="cn=201" (ci-vm-10-0-136-251:39201): Successfully released consumer DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [118441] [28/Oct/2020:23:30:29.629944578 -0400] - DEBUG - NSMMReplicationPlugin - conn_start_linger -agmt="cn=201" (ci-vm-10-0-136-251:39201) - Beginning linger on the connection DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [118615] [28/Oct/2020:23:30:29.632897859 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: sending_updates -> wait_for_changes DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [118723] [28/Oct/2020:23:30:29.913033321 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. 
DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [118894] [28/Oct/2020:23:30:29.916898334 -0400] - DEBUG - NSMMReplicationPlugin - agmt_set_enabled_from_entry: agreement is now disabled (agmt="cn=201" (ci-vm-10-0-136-251:39201)) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [119069] [28/Oct/2020:23:30:29.920164973 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: wait_for_changes -> wait_for_changes DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [119244] [28/Oct/2020:23:30:29.923426572 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: wait_for_changes -> wait_for_changes DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [119416] [28/Oct/2020:23:30:29.926859387 -0400] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-136-251:39201) - Canceling linger on the connection DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [119591] [28/Oct/2020:23:30:29.930333934 -0400] - DEBUG - NSMMReplicationPlugin - close_connection_internal - agmt="cn=201" (ci-vm-10-0-136-251:39201) - Disconnected from the consumer DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [119756] [28/Oct/2020:23:30:30.020368750 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_stop - agmt="cn=201" (ci-vm-10-0-136-251:39201): Protocol stopped after 0 seconds DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [119884] [28/Oct/2020:23:30:30.023996102 -0400] - DEBUG - NSMMReplicationPlugin - Database RUV: {replicageneration} 5f9a36f8000000010000 DEBUG  
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [120069] [28/Oct/2020:23:30:30.028529369 -0400] - DEBUG - NSMMReplicationPlugin - Database RUV: {replica 1 ldap://localhost.localdomain:39001} 5f9a36f8000100010000 5f9a3754000000010000 5f9a3754 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [120244] [28/Oct/2020:23:30:30.033837675 -0400] - DEBUG - NSMMReplicationPlugin - close_connection_internal - agmt="cn=201" (ci-vm-10-0-136-251:39201) - Disconnected from the consumer DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [120352] [28/Oct/2020:23:30:30.036913817 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [120460] [28/Oct/2020:23:30:35.055616280 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. 
DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [120630] [28/Oct/2020:23:30:35.059088120 -0400] - DEBUG - NSMMReplicationPlugin - agmt_set_enabled_from_entry: agreement is now enabled (agmt="cn=201" (ci-vm-10-0-136-251:39201)) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [120805] [28/Oct/2020:23:30:35.062203338 -0400] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-136-251:39201) - No linger to cancel on the connection DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [120980] [28/Oct/2020:23:30:35.065105325 -0400] - DEBUG - NSMMReplicationPlugin - close_connection_internal - agmt="cn=201" (ci-vm-10-0-136-251:39201) - Disconnected from the consumer DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [121152] [28/Oct/2020:23:30:35.069274001 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: start -> ready_to_acquire_replica DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [121321] [28/Oct/2020:23:30:35.072707130 -0400] - DEBUG - NSMMReplicationPlugin - conn_connect - agmt="cn=201" (ci-vm-10-0-136-251:39201) - Trying non-secure slapi_ldap_init_ext DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [121849] [28/Oct/2020:23:30:35.076168656 -0400] - DEBUG - NSMMReplicationPlugin - conn_connect - agmt="cn=201" (ci-vm-10-0-136-251:39201) - binddn = cn=ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701,ou=services,dc=example,dc=com, passwd = 
{AES-TUhNR0NTcUdTSWIzRFFFRkRUQm1NRVVHQ1NxR1NJYjNEUUVGRERBNEJDUmtaVFV5WlRsbVl5MWtORGhrTTJJdw0KTlMxaFl6VTNNamcwT1Mxak1XVm1aVEEyTlFBQ0FRSUNBU0F3Q2dZSUtvWklodmNOQWdjd0hRWUpZSVpJQVdVRA0KQkFFcUJCRDhpTkdqOTI2RVV0eS9mSGtIMW1INw==}1aP0CGo7VTyXKYAH9mQPJxPbmZao4qtbDnejHuqj6hueREUbjTmpIFJ9v+wKLJM0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [121894] hwtXYLylnARfBAm0SlP5kfwhZ0UpujEuyyXSDnqmiaE= DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [122069] [28/Oct/2020:23:30:35.080030434 -0400] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-136-251:39201) - No linger to cancel on the connection DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [122191] [28/Oct/2020:23:30:35.085251327 -0400] - DEBUG - _csngen_adjust_local_time - gen state before 5f9a37550000:1603942229:0:0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [122312] [28/Oct/2020:23:30:35.095667574 -0400] - DEBUG - _csngen_adjust_local_time - gen state after 5f9a375b0000:1603942235:0:0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [122480] [28/Oct/2020:23:30:35.099965233 -0400] - DEBUG - NSMMReplicationPlugin - acquire_replica - agmt="cn=201" (ci-vm-10-0-136-251:39201): Replica was successfully acquired. 
DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [122662] [28/Oct/2020:23:30:35.103586849 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: ready_to_acquire_replica -> sending_updates DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [122846] [28/Oct/2020:23:30:35.106968917 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Checking consumer schema localcsn:5f9a3754000000000000 / remotecsn:5f9a374b000000000000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [122980] [28/Oct/2020:23:30:35.330425612 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Reread remotecsn:5f9a3754000000000000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [123095] [28/Oct/2020:23:30:35.334304864 -0400] - DEBUG - csngen_adjust_time - gen state before 5f9a375b0001:1603942235:0:0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [123276] [28/Oct/2020:23:30:35.338070600 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-136-251:39201)): Consumer RUV: DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [123432] [28/Oct/2020:23:30:35.341704801 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replicageneration} 5f9a36f8000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [123645] [28/Oct/2020:23:30:35.344530596 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replica 1 ldap://localhost.localdomain:39001} 5f9a36f8000100010000 5f9a3754000000010000 00000000 DEBUG  
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [123826] [28/Oct/2020:23:30:35.347294619 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-136-251:39201)): Supplier RUV: DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [123982] [28/Oct/2020:23:30:35.350248737 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replicageneration} 5f9a36f8000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [124195] [28/Oct/2020:23:30:35.353329096 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replica 1 ldap://localhost.localdomain:39001} 5f9a36f8000100010000 5f9a3754000000010000 5f9a3754 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [124491] [28/Oct/2020:23:30:35.356238494 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_initial_anchorcsn - agmt="cn=201" (ci-vm-10-0-136-251:39201) - (cscb 0 - state 1) - csnPrevMax () csnMax (5f9a3754000000010000) csnBuf (00000000000000000000) csnConsumerMax (5f9a3754000000010000) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [124615] [28/Oct/2020:23:30:35.359536365 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_load_buffer - rc=-30988 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [124887] [28/Oct/2020:23:30:35.362645798 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_return_buffer - session end: state=5 load=0 sent=0 skipped=0 skipped_new_rid=0 skipped_csn_gt_cons_maxcsn=0 skipped_up_to_date=0 skipped_csn_gt_ruv=0 skipped_csn_covered=0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [125036] 
[28/Oct/2020:23:30:35.365664984 -0400] - DEBUG - NSMMReplicationPlugin - send_updates - agmt="cn=201" (ci-vm-10-0-136-251:39201): No changes to send DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [125200] [28/Oct/2020:23:30:35.372161157 -0400] - DEBUG - NSMMReplicationPlugin - release_replica - agmt="cn=201" (ci-vm-10-0-136-251:39201): Successfully released consumer DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [125370] [28/Oct/2020:23:30:35.375528134 -0400] - DEBUG - NSMMReplicationPlugin - conn_start_linger -agmt="cn=201" (ci-vm-10-0-136-251:39201) - Beginning linger on the connection DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [125544] [28/Oct/2020:23:30:35.378852963 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: sending_updates -> wait_for_changes DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [125666] [28/Oct/2020:23:30:37.070595905 -0400] - DEBUG - _csngen_adjust_local_time - gen state before 5f9a375b0001:1603942235:0:0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [125787] [28/Oct/2020:23:30:37.089024779 -0400] - DEBUG - _csngen_adjust_local_time - gen state after 5f9a375d0000:1603942237:0:0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [125950] [28/Oct/2020:23:30:37.092511592 -0400] - DEBUG - NSMMReplicationPlugin - ruv_add_csn_inprogress - Successfully inserted csn 5f9a375d000000010000 into pending list DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [126130] [28/Oct/2020:23:30:37.095956381 -0400] - DEBUG - NSMMReplicationPlugin - purge_entry_state_information - From entry cn=test_entry,dc=example,dc=com up to CSN 
5f90fcd4000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [126375] [28/Oct/2020:23:30:37.099548849 -0400] - DEBUG - NSMMReplicationPlugin - write_changelog_and_ruv - Writing change for cn=test_entry,dc=example,dc=com (uniqid: e2746602-199611eb-bbbd91ac-3101a0f1, optype: 8) to changelog csn 5f9a375d000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [126550] [28/Oct/2020:23:30:37.102664692 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - cl5WriteOperationTxn - Successfully written entry with csn (5f9a375d000000010000) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [126688] [28/Oct/2020:23:30:37.105452638 -0400] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: committing all csns for csn 5f9a375d000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [126818] [28/Oct/2020:23:30:37.109417768 -0400] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: processing data csn 5f9a375d000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [126956] [28/Oct/2020:23:30:37.112522032 -0400] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Successfully committed csn 5f9a375d000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [127084] [28/Oct/2020:23:30:37.115574450 -0400] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Rolled up to csn 5f9a375d000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [127199] [28/Oct/2020:23:30:37.118654888 -0400] - DEBUG - replication - multimaster_mmr_postop - error 0 for operation 561. 
DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [127374] [28/Oct/2020:23:30:37.132826337 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: wait_for_changes -> wait_for_changes DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [127557] [28/Oct/2020:23:30:37.137709665 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: wait_for_changes -> ready_to_acquire_replica DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [127729] [28/Oct/2020:23:30:37.141310951 -0400] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-136-251:39201) - Canceling linger on the connection DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [127897] [28/Oct/2020:23:30:37.147149577 -0400] - DEBUG - NSMMReplicationPlugin - acquire_replica - agmt="cn=201" (ci-vm-10-0-136-251:39201): Replica was successfully acquired. 
DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [128079] [28/Oct/2020:23:30:37.150572804 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: ready_to_acquire_replica -> sending_updates DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [128194] [28/Oct/2020:23:30:37.154608779 -0400] - DEBUG - csngen_adjust_time - gen state before 5f9a375d0002:1603942237:0:0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [128375] [28/Oct/2020:23:30:37.158366009 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-136-251:39201)): Consumer RUV: DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [128531] [28/Oct/2020:23:30:37.162311207 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replicageneration} 5f9a36f8000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [128744] [28/Oct/2020:23:30:37.167256687 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replica 1 ldap://localhost.localdomain:39001} 5f9a36f8000100010000 5f9a3754000000010000 00000000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [128925] [28/Oct/2020:23:30:37.172298818 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-136-251:39201)): Supplier RUV: DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [129081] [28/Oct/2020:23:30:37.175391366 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replicageneration} 5f9a36f8000000010000 DEBUG  
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [129294] [28/Oct/2020:23:30:37.178722868 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-136-251:39201): {replica 1 ldap://localhost.localdomain:39001} 5f9a36f8000100010000 5f9a375d000000010000 5f9a375d DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [129456] [28/Oct/2020:23:30:37.184181163 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_get_buffer - found thread private buffer cache 0x7ff560060600 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [129683] [28/Oct/2020:23:30:37.187595475 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_get_buffer - _pool is 0x7ff58e23f2e0 _pool->pl_busy_lists is 0x7ff560f05ae0 _pool->pl_busy_lists->bl_buffers is 0x7ff560060600 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [129979] [28/Oct/2020:23:30:37.190982448 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_initial_anchorcsn - agmt="cn=201" (ci-vm-10-0-136-251:39201) - (cscb 0 - state 0) - csnPrevMax () csnMax (5f9a375d000000010000) csnBuf (00000000000000000000) csnConsumerMax (5f9a3754000000010000) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [130092] [28/Oct/2020:23:30:37.194325557 -0400] - DEBUG - clcache_initial_anchorcsn - anchor is now: 5f9a3754000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [130283] [28/Oct/2020:23:30:37.197345945 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - agmt="cn=201" (ci-vm-10-0-136-251:39201): CSN 5f9a3754000000010000 found, position set for replay DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [130439] [28/Oct/2020:23:30:37.200963445 -0400] - 
DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_get_next_change - load=1 rec=1 csn=5f9a375d000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [130551] [28/Oct/2020:23:30:37.204334159 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Starting DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [130683] [28/Oct/2020:23:30:37.207604257 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [130903] [28/Oct/2020:23:30:37.210951212 -0400] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-136-251:39201): Sending modify operation (dn="cn=test_entry,dc=example,dc=com" csn=5f9a375d000000010000) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [131035] [28/Oct/2020:23:30:37.218819598 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [131233] [28/Oct/2020:23:30:37.224116804 -0400] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-136-251:39201): Consumer successfully sent operation with csn 5f9a375d000000010000 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [131365] [28/Oct/2020:23:30:37.229167323 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [131680] [28/Oct/2020:23:30:37.232174949 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_adjust_anchorcsn - agmt="cn=201" (ci-vm-10-0-136-251:39201) - (cscb 0 - state 1) - csnPrevMax 
(5f9a375d000000010000) csnMax (5f9a375d000000010000) csnBuf (5f9a375d000000010000) csnConsumerMax (5f9a375d000000010000) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [131804] [28/Oct/2020:23:30:37.235541038 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_load_buffer - rc=-30988 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [131988] [28/Oct/2020:23:30:37.238609008 -0400] - DEBUG - NSMMReplicationPlugin - send_updates - agmt="cn=201" (ci-vm-10-0-136-251:39201): No more updates to send (cl5GetNextOperationToReplay) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [132100] [28/Oct/2020:23:30:37.241781614 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 0 11 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [132233] [28/Oct/2020:23:30:37.245271447 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 11 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [132363] [28/Oct/2020:23:30:37.248568454 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Result 3, 0, 0, 11, (null) DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [132496] [28/Oct/2020:23:30:37.251655358 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 11 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [132629] [28/Oct/2020:23:30:37.256644400 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 11 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [132762] [28/Oct/2020:23:30:37.266540115 -0400] - DEBUG - 
NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 11 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [132895] [28/Oct/2020:23:30:37.275456651 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 11 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [133028] [28/Oct/2020:23:30:37.287177941 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 11 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [133161] [28/Oct/2020:23:30:37.307586129 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 11 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [133294] [28/Oct/2020:23:30:37.343947059 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 11 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [133407] [28/Oct/2020:23:30:37.347723023 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 11 11 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [133516] [28/Oct/2020:23:30:37.412316073 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain exiting DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [133788] [28/Oct/2020:23:30:37.416593564 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-136-251:39201) - clcache_return_buffer - session end: state=5 load=1 sent=1 skipped=0 skipped_new_rid=0 skipped_csn_gt_cons_maxcsn=0 skipped_up_to_date=0 skipped_csn_gt_ruv=0 skipped_csn_covered=0 DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [133952] 
[28/Oct/2020:23:30:37.423992775 -0400] - DEBUG - NSMMReplicationPlugin - release_replica - agmt="cn=201" (ci-vm-10-0-136-251:39201): Successfully released consumer DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [134122] [28/Oct/2020:23:30:37.427113243 -0400] - DEBUG - NSMMReplicationPlugin - conn_start_linger -agmt="cn=201" (ci-vm-10-0-136-251:39201) - Beginning linger on the connection DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [134296] [28/Oct/2020:23:30:37.430226701 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-136-251:39201): State: sending_updates -> wait_for_changes DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [134296] DEBUG  tests.suites.schema.schema_replication_test:schema_replication_test.py:67 _pattern_errorlog: end at offset 134296 INFO  tests.suites.schema.schema_replication_test:schema_replication_test.py:693 Testcase PASSED
Passed suites/schema/schema_test.py::test_schema_comparewithfiles 0.14
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.suites.schema.schema_test:schema_test.py:125 Running test_schema_comparewithfiles... WARNING  tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /etc/dirsrv/slapd-standalone1/schema/99user.ldif as a schema file - skipping WARNING  tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/10mep-plugin.ldif as a schema file - skipping WARNING  tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/60autofs.ldif as a schema file - skipping WARNING  tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/10automember-plugin.ldif as a schema file - skipping WARNING  tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/25java-object.ldif as a schema file - skipping WARNING  tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/00core.ldif as a schema file - skipping WARNING  tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/30ns-common.ldif as a schema file - skipping WARNING  tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/60pam-plugin.ldif as a schema file - skipping WARNING  tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/60posix-winsync-plugin.ldif as a schema file - skipping WARNING  tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/28pilot.ldif as a schema file - skipping WARNING  tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/02common.ldif as a schema file - skipping WARNING  tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/50ns-directory.ldif as a schema file - skipping WARNING  tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/10dna-plugin.ldif as a schema 
file - skipping WARNING  tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/50ns-admin.ldif as a schema file - skipping WARNING  tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/20subscriber.ldif as a schema file - skipping WARNING  tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/10rfc2307compat.ldif as a schema file - skipping WARNING  tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/60samba3.ldif as a schema file - skipping WARNING  tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/60trust.ldif as a schema file - skipping WARNING  tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/60mozilla.ldif as a schema file - skipping WARNING  tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/60pureftpd.ldif as a schema file - skipping WARNING  tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/05rfc4524.ldif as a schema file - skipping WARNING  tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/60nss-ldap.ldif as a schema file - skipping WARNING  tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/50ns-certificate.ldif as a schema file - skipping WARNING  tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/60eduperson.ldif as a schema file - skipping WARNING  tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/05rfc2927.ldif as a schema file - skipping WARNING  tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/60acctpolicy.ldif as a schema file - skipping WARNING  tests.suites.schema.schema_test:schema_test.py:137 Unable to parse 
/usr/share/dirsrv/schema/50ns-web.ldif as a schema file - skipping WARNING  tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/60sudo.ldif as a schema file - skipping WARNING  tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/60rfc3712.ldif as a schema file - skipping WARNING  tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/50ns-mail.ldif as a schema file - skipping WARNING  tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/01core389.ldif as a schema file - skipping WARNING  tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/05rfc4523.ldif as a schema file - skipping WARNING  tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/50ns-value.ldif as a schema file - skipping WARNING  tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/60sabayon.ldif as a schema file - skipping WARNING  tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/06inetorgperson.ldif as a schema file - skipping WARNING  tests.suites.schema.schema_test:schema_test.py:137 Unable to parse /usr/share/dirsrv/schema/60rfc2739.ldif as a schema file - skipping INFO  tests.suites.schema.schema_test:schema_test.py:166 test_schema_comparewithfiles: PASSED
Passed suites/setup_ds/dscreate_test.py::test_setup_ds_minimal_dry 0.16
-------------------------------Captured log setup-------------------------------
DEBUG  lib389:dscreate_test.py:42 Instance allocated DEBUG  lib389:__init__.py:554 Allocate <class 'lib389.DirSrv'> with None DEBUG  lib389:__init__.py:577 Allocate <class 'lib389.DirSrv'> with ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:54321 DEBUG  lib389:__init__.py:602 Allocate <class 'lib389.DirSrv'> with ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:54321 DEBUG  lib389:__init__.py:742 list instance not found in /etc/dirsrv/slapd-standalone/dse.ldif: standalone
-------------------------------Captured log call--------------------------------
DEBUG  lib389:__init__.py:742 list instance not found in /etc/dirsrv/slapd-standalone/dse.ldif: standalone INFO  LogCapture.SetupDs:setup.py:670 NOOP: Dry run requested DEBUG  lib389:__init__.py:742 list instance not found in /etc/dirsrv/slapd-standalone/dse.ldif: standalone
Passed suites/setup_ds/dscreate_test.py::test_setup_ds_minimal 21.88
-------------------------------Captured log setup-------------------------------
DEBUG  lib389:dscreate_test.py:42 Instance allocated DEBUG  lib389:__init__.py:554 Allocate <class 'lib389.DirSrv'> with None DEBUG  lib389:__init__.py:577 Allocate <class 'lib389.DirSrv'> with ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:54321 DEBUG  lib389:__init__.py:602 Allocate <class 'lib389.DirSrv'> with ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:54321 DEBUG  lib389:__init__.py:742 list instance not found in /etc/dirsrv/slapd-standalone/dse.ldif: standalone
-------------------------------Captured log call--------------------------------
DEBUG  lib389:__init__.py:742 list instance not found in /etc/dirsrv/slapd-standalone/dse.ldif: standalone DEBUG  lib389:__init__.py:554 Allocate <class 'lib389.DirSrv'> with None DEBUG  lib389:__init__.py:566 Allocate <class 'lib389.DirSrv'> with /var/run/slapd-standalone.socket DEBUG  lib389:__init__.py:577 Allocate <class 'lib389.DirSrv'> with localhost:54321 DEBUG  lib389:__init__.py:602 Allocate <class 'lib389.DirSrv'> with localhost:54321 DEBUG  lib389:nss_ssl.py:197 nss cmd: /usr/bin/certutil -N -d /etc/dirsrv/slapd-standalone -f /etc/dirsrv/slapd-standalone/pwdfile.txt DEBUG  lib389:nss_ssl.py:199 nss output: DEBUG  lib389.nss_ssl:nss_ssl.py:314 nss cmd: /usr/bin/certutil -L -n Self-Signed-CA -d /etc/dirsrv/ssca/ DEBUG  lib389:nss_ssl.py:559 CSR subject -> CN=ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com,givenName=b306ac1b-870a-4413-b2eb-56f30e6c844f,O=testing,L=389ds,ST=Queensland,C=AU DEBUG  lib389:nss_ssl.py:560 CSR alt_names -> ['ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com'] DEBUG  lib389:nss_ssl.py:592 nss cmd: /usr/bin/certutil -R --keyUsage digitalSignature,nonRepudiation,keyEncipherment,dataEncipherment --nsCertType sslClient,sslServer --extKeyUsage clientAuth,serverAuth -s CN=ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com,givenName=b306ac1b-870a-4413-b2eb-56f30e6c844f,O=testing,L=389ds,ST=Queensland,C=AU -8 ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com -g 4096 -d /etc/dirsrv/slapd-standalone -z /etc/dirsrv/slapd-standalone/noise.txt -f /etc/dirsrv/slapd-standalone/pwdfile.txt -a -o /etc/dirsrv/slapd-standalone/Server-Cert.csr DEBUG  lib389.nss_ssl:nss_ssl.py:618 nss cmd: /usr/bin/certutil -C -d /etc/dirsrv/ssca/ -f /etc/dirsrv/ssca//pwdfile.txt -v 24 -a -i /etc/dirsrv/slapd-standalone/Server-Cert.csr -o /etc/dirsrv/slapd-standalone/Server-Cert.crt -c Self-Signed-CA DEBUG  lib389:nss_ssl.py:242 nss cmd: /usr/bin/openssl rehash /etc/dirsrv/slapd-standalone DEBUG  lib389:nss_ssl.py:646 nss cmd: /usr/bin/certutil -A -n Self-Signed-CA 
-t CT,, -a -i /etc/dirsrv/slapd-standalone/ca.crt -d /etc/dirsrv/slapd-standalone -f /etc/dirsrv/slapd-standalone/pwdfile.txt DEBUG  lib389:nss_ssl.py:661 nss cmd: /usr/bin/certutil -A -n Server-Cert -t ,, -a -i /etc/dirsrv/slapd-standalone/Server-Cert.crt -d /etc/dirsrv/slapd-standalone -f /etc/dirsrv/slapd-standalone/pwdfile.txt DEBUG  lib389:nss_ssl.py:670 nss cmd: /usr/bin/certutil -V -d /etc/dirsrv/slapd-standalone -n Server-Cert -u YCV DEBUG  lib389.utils:utils.py:284 port 636 already in [389, 636, 3268, 3269, 7389], skipping port relabel DEBUG  lib389.utils:utils.py:315 CMD: semanage port -a -t ldap_port_t -p tcp 54321 ; STDOUT: ; STDERR: DEBUG  lib389:__init__.py:1173 systemd status -> True DEBUG  lib389:__init__.py:1077 systemd status -> True DEBUG  lib389:__init__.py:937 open(): Connecting to uri ldapi://%2Fvar%2Frun%2Fslapd-standalone.socket DEBUG  lib389:__init__.py:945 Using dirsrv ca certificate /etc/dirsrv/slapd-standalone DEBUG  lib389:__init__.py:954 Using external ca certificate /etc/dirsrv/slapd-standalone DEBUG  lib389:__init__.py:967 Using external ca certificate /etc/dirsrv/slapd-standalone DEBUG  lib389:__init__.py:975 Using certificate policy 1 DEBUG  lib389:__init__.py:976 ldap.OPT_X_TLS_REQUIRE_CERT = 1 DEBUG  lib389:__init__.py:1009 open(): Using root autobind ... 
DEBUG  lib389:__init__.py:1030 open(): bound as cn=Directory Manager DEBUG  lib389:__init__.py:1692 Retrieving entry with [('',)] DEBUG  lib389:__init__.py:1702 Retrieved entry [dn: vendorVersion: 389-Directory/1.4.5.0 B2020.303.0011 ] DEBUG  lib389:__init__.py:937 open(): Connecting to uri ldapi://%2Fvar%2Frun%2Fslapd-standalone.socket DEBUG  lib389:__init__.py:945 Using dirsrv ca certificate /etc/dirsrv/slapd-standalone DEBUG  lib389:__init__.py:954 Using external ca certificate /etc/dirsrv/slapd-standalone DEBUG  lib389:__init__.py:967 Using external ca certificate /etc/dirsrv/slapd-standalone DEBUG  lib389:__init__.py:975 Using certificate policy 1 DEBUG  lib389:__init__.py:976 ldap.OPT_X_TLS_REQUIRE_CERT = 1 DEBUG  lib389:__init__.py:1009 open(): Using root autobind ... DEBUG  lib389:__init__.py:1030 open(): bound as cn=Directory Manager DEBUG  lib389:__init__.py:1692 Retrieving entry with [('',)] DEBUG  lib389:__init__.py:1702 Retrieved entry [dn: vendorVersion: 389-Directory/1.4.5.0 B2020.303.0011 ] DEBUG  Config:_mapped_object.py:434 cn=config set REPLACE: ('nsslapd-secureport', '636') DEBUG  Config:_mapped_object.py:434 cn=config set REPLACE: ('nsslapd-security', 'on') DEBUG  Config:_mapped_object.py:434 cn=config set REPLACE: ('nsslapd-rootpw', '********') DEBUG  lib389:__init__.py:1173 systemd status -> True DEBUG  lib389:__init__.py:1146 systemd status -> True DEBUG  lib389:__init__.py:1173 systemd status -> True DEBUG  lib389:__init__.py:1077 systemd status -> True DEBUG  lib389:__init__.py:937 open(): Connecting to uri ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:54321/ DEBUG  lib389:__init__.py:945 Using dirsrv ca certificate /etc/dirsrv/slapd-standalone DEBUG  lib389:__init__.py:954 Using external ca certificate /etc/dirsrv/slapd-standalone DEBUG  lib389:__init__.py:967 Using external ca certificate /etc/dirsrv/slapd-standalone DEBUG  lib389:__init__.py:975 Using certificate policy 1 DEBUG  lib389:__init__.py:976 
ldap.OPT_X_TLS_REQUIRE_CERT = 1 DEBUG  lib389:__init__.py:1030 open(): bound as cn=Directory Manager DEBUG  lib389:__init__.py:1692 Retrieving entry with [('',)] DEBUG  lib389:__init__.py:1702 Retrieved entry [dn: vendorVersion: 389-Directory/1.4.5.0 B2020.303.0011 ] DEBUG  lib389:__init__.py:1173 systemd status -> True DEBUG  lib389:__init__.py:1146 systemd status -> True DEBUG  lib389:__init__.py:1173 systemd status -> True DEBUG  lib389:__init__.py:1077 systemd status -> True DEBUG  lib389:__init__.py:937 open(): Connecting to uri ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:54321/ DEBUG  lib389:__init__.py:945 Using dirsrv ca certificate /etc/dirsrv/slapd-standalone DEBUG  lib389:__init__.py:954 Using external ca certificate /etc/dirsrv/slapd-standalone DEBUG  lib389:__init__.py:967 Using external ca certificate /etc/dirsrv/slapd-standalone DEBUG  lib389:__init__.py:975 Using certificate policy 1 DEBUG  lib389:__init__.py:976 ldap.OPT_X_TLS_REQUIRE_CERT = 1 DEBUG  lib389:__init__.py:1030 open(): bound as cn=Directory Manager DEBUG  lib389:__init__.py:1692 Retrieving entry with [('',)] DEBUG  lib389:__init__.py:1702 Retrieved entry [dn: vendorVersion: 389-Directory/1.4.5.0 B2020.303.0011 ] DEBUG  lib389.remove_ds:remove.py:38 Removing instance standalone DEBUG  lib389:__init__.py:1692 Retrieving entry with [('cn=config',)] DEBUG  lib389:__init__.py:1702 Retrieved entry [dn: cn=config nsslapd-bakdir: /var/lib/dirsrv/slapd-standalone/bak ] DEBUG  lib389:__init__.py:1692 Retrieving entry with [('cn=config',)] DEBUG  lib389:__init__.py:1702 Retrieved entry [dn: cn=config nsslapd-certdir: /etc/dirsrv/slapd-standalone ] DEBUG  lib389:__init__.py:1692 Retrieving entry with [('cn=config,cn=ldbm database,cn=plugins,cn=config',)] DEBUG  lib389:__init__.py:1702 Retrieved entry [dn: cn=config,cn=ldbm database,cn=plugins,cn=config nsslapd-directory: /var/lib/dirsrv/slapd-standalone/db ] DEBUG  lib389:__init__.py:1692 Retrieving entry with 
[('cn=bdb,cn=config,cn=ldbm database,cn=plugins,cn=config',)] DEBUG  lib389:__init__.py:1702 Retrieved entry [dn: cn=bdb,cn=config,cn=ldbm database,cn=plugins,cn=config nsslapd-db-home-directory: /var/lib/dirsrv/slapd-standalone/db ] DEBUG  lib389:__init__.py:1692 Retrieving entry with [('cn=config,cn=ldbm database,cn=plugins,cn=config',)] DEBUG  lib389:__init__.py:1702 Retrieved entry [dn: cn=config,cn=ldbm database,cn=plugins,cn=config nsslapd-directory: /var/lib/dirsrv/slapd-standalone/db ] DEBUG  lib389:__init__.py:1692 Retrieving entry with [('cn=config,cn=ldbm database,cn=plugins,cn=config',)] DEBUG  lib389:__init__.py:1702 Retrieved entry [dn: cn=config,cn=ldbm database,cn=plugins,cn=config nsslapd-directory: /var/lib/dirsrv/slapd-standalone/db ] DEBUG  lib389:__init__.py:1692 Retrieving entry with [('cn=config',)] DEBUG  lib389:__init__.py:1702 Retrieved entry [dn: cn=config nsslapd-ldifdir: /var/lib/dirsrv/slapd-standalone/ldif ] DEBUG  lib389:__init__.py:1692 Retrieving entry with [('cn=config',)] DEBUG  lib389:__init__.py:1702 Retrieved entry [dn: cn=config nsslapd-lockdir: /var/lock/dirsrv/slapd-standalone ] DEBUG  lib389:__init__.py:1692 Retrieving entry with [('cn=config',)] DEBUG  lib389:__init__.py:1702 Retrieved entry [dn: cn=config nsslapd-instancedir: /usr/lib64/dirsrv/slapd-standalone ] DEBUG  lib389.remove_ds:remove.py:67 Checking for instance marker at /etc/dirsrv/slapd-standalone/dse.ldif DEBUG  lib389.remove_ds:remove.py:72 Found instance marker at /etc/dirsrv/slapd-standalone/dse.ldif! Proceeding to remove ... DEBUG  lib389.remove_ds:remove.py:76 Stopping instance standalone DEBUG  lib389:__init__.py:1173 systemd status -> True DEBUG  lib389:__init__.py:1146 systemd status -> True DEBUG  lib389.remove_ds:remove.py:79 Found instance marker at /etc/dirsrv/slapd-standalone/dse.ldif! Proceeding to remove ... 
DEBUG  lib389.remove_ds:remove.py:83 Stopping instance standalone DEBUG  lib389:__init__.py:1173 systemd status -> True DEBUG  lib389.remove_ds:remove.py:92 Removing /var/lib/dirsrv/slapd-standalone/bak DEBUG  lib389.remove_ds:remove.py:92 Removing /etc/dirsrv/slapd-standalone DEBUG  lib389.remove_ds:remove.py:92 Removing /etc/dirsrv/slapd-standalone DEBUG  lib389.remove_ds:remove.py:92 Removing /var/lib/dirsrv/slapd-standalone/db DEBUG  lib389.remove_ds:remove.py:92 Removing /var/lib/dirsrv/slapd-standalone/db DEBUG  lib389.remove_ds:remove.py:92 Removing /var/lib/dirsrv/slapd-standalone/db/../ DEBUG  lib389.remove_ds:remove.py:92 Removing /var/lib/dirsrv/slapd-standalone/changelogdb DEBUG  lib389.remove_ds:remove.py:92 Removing /var/lib/dirsrv/slapd-standalone/ldif DEBUG  lib389.remove_ds:remove.py:92 Removing /var/lock/dirsrv/slapd-standalone DEBUG  lib389.remove_ds:remove.py:92 Removing /var/log/dirsrv/slapd-standalone DEBUG  lib389.remove_ds:remove.py:92 Removing /usr/lib64/dirsrv/slapd-standalone DEBUG  lib389.remove_ds:remove.py:92 Removing /etc/sysconfig/dirsrv-standalone DEBUG  lib389.remove_ds:remove.py:101 Removing the systemd symlink DEBUG  lib389.remove_ds:remove.py:108 CMD: systemctl disable dirsrv@standalone ; STDOUT: ; STDERR: Removed /etc/systemd/system/multi-user.target.wants/dirsrv@standalone.service. DEBUG  lib389.remove_ds:remove.py:110 Removing /etc/tmpfiles.d/dirsrv-standalone.conf DEBUG  lib389.remove_ds:remove.py:119 Removing the port labels DEBUG  lib389.remove_ds:remove.py:149 Moving /etc/dirsrv/slapd-standalone to /etc/dirsrv/slapd-standalone.removed DEBUG  lib389.remove_ds:remove.py:159 Complete
Passed suites/setup_ds/dscreate_test.py::test_setup_ds_inf_minimal 0.00
-------------------------------Captured log setup-------------------------------
DEBUG  lib389:dscreate_test.py:42 Instance allocated DEBUG  lib389:__init__.py:554 Allocate <class 'lib389.DirSrv'> with None DEBUG  lib389:__init__.py:577 Allocate <class 'lib389.DirSrv'> with ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:54321 DEBUG  lib389:__init__.py:602 Allocate <class 'lib389.DirSrv'> with ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:54321 DEBUG  lib389:__init__.py:742 list instance not found in /etc/dirsrv/slapd-standalone/dse.ldif: standalone
Passed suites/setup_ds/remove_test.py::test_basic[True] 3.61
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/setup_ds/remove_test.py::test_basic[False] 4.40
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/state/mmt_state_test.py::test_check_desc_attr_state[description-Test1usr1-ldap.MOD_ADD-exp_values0-vucsn] 0.07
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 7fa24d2d-08f9-47ba-a2c9-b7ae7c0095b3 / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working 
(expect 46a7e078-b033-4577-a4d8-c3c514d64238 / got description=7fa24d2d-08f9-47ba-a2c9-b7ae7c0095b3) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists
-------------------------------Captured log call--------------------------------
INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:99 Add user: state1test INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:107 Check if list of description attrs present for: state1test INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:110 Checking for operational attributes INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: state1test
Passed suites/state/mmt_state_test.py::test_check_desc_attr_state[description-Test1usr2-ldap.MOD_ADD-exp_values1-vucsn] 0.02
-------------------------------Captured log call--------------------------------
INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:99 Add user: state1test INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:107 Check if list of description attrs present for: state1test INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:110 Checking for operational attributes INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: state1test
Passed suites/state/mmt_state_test.py::test_check_desc_attr_state[description-Test1usr3-ldap.MOD_ADD-exp_values2-vucsn] 0.02
-------------------------------Captured log call--------------------------------
INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:99 Add user: state1test INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:107 Check if list of description attrs present for: state1test INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:110 Checking for operational attributes INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: state1test
Passed suites/state/mmt_state_test.py::test_check_desc_attr_state[description-Test1usr4-ldap.MOD_REPLACE-exp_values3-adcsn] 0.01
-------------------------------Captured log call--------------------------------
INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:99 Add user: state1test INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:107 Check if list of description attrs present for: state1test INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:110 Checking for operational attributes INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: state1test
Passed suites/state/mmt_state_test.py::test_check_desc_attr_state[description-Test1usr4-ldap.MOD_DELETE-exp_values4-vdcsn] 0.02
-------------------------------Captured log call--------------------------------
INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:99 Add user: state1test INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:107 Check if list of description attrs present for: state1test INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:110 Checking for operational attributes INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: state1test
Passed suites/state/mmt_state_test.py::test_check_cn_attr_state[cn-TestCN1-ldap.MOD_ADD-exp_values0-vucsn] 0.05
-------------------------------Captured log call--------------------------------
INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:153 Add user: TestCNusr1 INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:166 Check if list of cn attrs present for: TestCNusr1 INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:168 Checking for operational attributes INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: TestCNusr1
Passed suites/state/mmt_state_test.py::test_check_cn_attr_state[cn-TestCN2-ldap.MOD_ADD-exp_values1-vucsn] 0.04
-------------------------------Captured log call--------------------------------
INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:153 Add user: TestCNusr1 INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:166 Check if list of cn attrs present for: TestCNusr1 INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:168 Checking for operational attributes INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: TestCNusr1
Passed suites/state/mmt_state_test.py::test_check_cn_attr_state[cn-TestnewCN3-ldap.MOD_REPLACE-exp_values2-adcsn] 0.02
-------------------------------Captured log call--------------------------------
INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:153 Add user: TestCNusr1 INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:166 Check if list of cn attrs present for: TestCNusr1 INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:168 Checking for operational attributes INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: TestCNusr1
Passed suites/state/mmt_state_test.py::test_check_cn_attr_state[cn-TestnewCN3-ldap.MOD_DELETE-None-None] 0.04
-------------------------------Captured log call--------------------------------
INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:153 Add user: TestCNusr1 INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:168 Checking for operational attributes INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: TestCNusr1
Passed suites/state/mmt_state_test.py::test_check_single_value_attr_state[preferredlanguage-Chinese-ldap.MOD_REPLACE-exp_values0-vucsn] 0.05
-------------------------------Captured log call--------------------------------
INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:206 Add user: Langusr1 INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:219 Check if list of cn attrs present for: Langusr1 INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:221 Checking for operational attributes INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: Langusr1
Passed suites/state/mmt_state_test.py::test_check_single_value_attr_state[preferredlanguage-French-ldap.MOD_ADD-None-None] 0.03
-------------------------------Captured log call--------------------------------
INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:206 Add user: Langusr1 INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:221 Checking for operational attributes INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: Langusr1
Passed suites/state/mmt_state_test.py::test_check_single_value_attr_state[preferredlanguage-German-ldap.MOD_REPLACE-exp_values2-adcsn] 0.02
-------------------------------Captured log call--------------------------------
INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:206 Add user: Langusr1 INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:219 Check if list of cn attrs present for: Langusr1 INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:221 Checking for operational attributes INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: Langusr1
Passed suites/state/mmt_state_test.py::test_check_single_value_attr_state[preferredlanguage-German-ldap.MOD_DELETE-exp_values3-vdcsn] 0.01
-------------------------------Captured log call--------------------------------
INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:206 Add user: Langusr1 INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:219 Check if list of cn attrs present for: Langusr1 INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:221 Checking for operational attributes INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: Langusr1
Passed suites/state/mmt_state_test.py::test_check_subtype_attr_state[roomnumber;office-Tower1-ldap.MOD_ADD-exp_values0-vucsn] 0.04
-------------------------------Captured log call--------------------------------
INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:276 Add user: roomoffice1usr INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:285 Check if list of roomnumber;office attributes are present for a given entry INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:287 Checking if operational attributes are present for cn INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: roomoffice1usr
Passed suites/state/mmt_state_test.py::test_check_subtype_attr_state[roomnumber;office-Tower2-ldap.MOD_ADD-exp_values1-vucsn] 0.02
-------------------------------Captured log call--------------------------------
INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:276 Add user: roomoffice1usr INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:285 Check if list of roomnumber;office attributes are present for a given entry INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:287 Checking if operational attributes are present for cn INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: roomoffice1usr
Passed suites/state/mmt_state_test.py::test_check_subtype_attr_state[roomnumber;office-Tower3-ldap.MOD_ADD-exp_values2-vucsn] 0.01
-------------------------------Captured log call--------------------------------
INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:276 Add user: roomoffice1usr INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:285 Check if list of roomnumber;office attributes are present for a given entry INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:287 Checking if operational attributes are present for cn INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: roomoffice1usr
Passed suites/state/mmt_state_test.py::test_check_subtype_attr_state[roomnumber;office-Tower4-ldap.MOD_REPLACE-exp_values3-adcsn] 0.01
-------------------------------Captured log call--------------------------------
INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:276 Add user: roomoffice1usr INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:285 Check if list of roomnumber;office attributes are present for a given entry INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:287 Checking if operational attributes are present for cn INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: roomoffice1usr
Passed suites/state/mmt_state_test.py::test_check_subtype_attr_state[roomnumber;office-Tower4-ldap.MOD_DELETE-exp_values4-vucsn] 0.01
-------------------------------Captured log call--------------------------------
INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:276 Add user: roomoffice1usr INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:285 Check if list of roomnumber;office attributes are present for a given entry INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:287 Checking if operational attributes are present for cn INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: roomoffice1usr
Passed suites/state/mmt_state_test.py::test_check_jpeg_attr_state[jpegphoto-thedeadbeef1-ldap.MOD_ADD-exp_values0-vucsn] 0.05
-------------------------------Captured log call--------------------------------
INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:342 Add user: testJpeg1usr INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:351 Check if list of jpeg attributes are present for a given entry INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:353 Checking if operational attributes are present for cn INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: testJpeg1usr
Passed suites/state/mmt_state_test.py::test_check_jpeg_attr_state[jpegphoto-thedeadbeef2-ldap.MOD_ADD-exp_values1-vucsn] 0.02
-------------------------------Captured log call--------------------------------
INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:342 Add user: testJpeg1usr INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:351 Check if list of jpeg attributes are present for a given entry INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:353 Checking if operational attributes are present for cn INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: testJpeg1usr
Passed suites/state/mmt_state_test.py::test_check_jpeg_attr_state[jpegphoto-thedeadbeef3-ldap.MOD_ADD-exp_values2-vucsn] 0.01
-------------------------------Captured log call--------------------------------
INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:342 Add user: testJpeg1usr INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:351 Check if list of jpeg attributes are present for a given entry INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:353 Checking if operational attributes are present for cn INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: testJpeg1usr
Passed suites/state/mmt_state_test.py::test_check_jpeg_attr_state[jpegphoto-thedeadbeef2-ldap.MOD_REPLACE-exp_values3-adcsn] 0.02
-------------------------------Captured log call--------------------------------
INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:342 Add user: testJpeg1usr INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:351 Check if list of jpeg attributes are present for a given entry INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:353 Checking if operational attributes are present for cn INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: testJpeg1usr
Passed suites/state/mmt_state_test.py::test_check_jpeg_attr_state[jpegphoto-thedeadbeef2-ldap.MOD_DELETE-exp_values4-vdcsn] 0.01
-------------------------------Captured log call--------------------------------
INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:342 Add user: testJpeg1usr INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:351 Check if list of jpeg attributes are present for a given entry INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:353 Checking if operational attributes are present for cn INFO  tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: testJpeg1usr
Passed suites/syncrepl_plugin/basic_test.py::test_syncrepl_basic 4.81
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/syncrepl_plugin/basic_test.py::test_sync_repl_mep 25.57
------------------------------Captured stdout call------------------------------
syncrepl_poll: LDAP error: {'result': -1, 'desc': "Can't contact LDAP server", 'ctrls': []}
Passed suites/syncrepl_plugin/openldap_test.py::test_syncrepl_openldap 4.94
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/syntax/acceptance_test.py::test_valid 6.02
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  lib389.topologies.tests.suites.syntax.acceptance_test:acceptance_test.py:54 Clean the error log INFO  lib389.topologies.tests.suites.syntax.acceptance_test:acceptance_test.py:58 Attempting to add task entry... INFO  lib389.topologies.tests.suites.syntax.acceptance_test:acceptance_test.py:65 Found 0 invalid entries - Success
Passed suites/syntax/acceptance_test.py::test_invalid_uidnumber 3.98
-------------------------------Captured log call--------------------------------
INFO  lib389.topologies.tests.suites.syntax.acceptance_test:acceptance_test.py:91 Clean the error log INFO  lib389.topologies.tests.suites.syntax.acceptance_test:acceptance_test.py:98 Attempting to add task entry... INFO  lib389.topologies.tests.suites.syntax.acceptance_test:acceptance_test.py:105 Found an invalid entry with wrong uidNumber - Success
Passed suites/syntax/mr_test.py::test_sss_mr 6.25
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.suites.syntax.mr_test:mr_test.py:42 Creating LDIF... INFO  tests.suites.syntax.mr_test:mr_test.py:47 Importing LDIF... INFO  tests.suites.syntax.mr_test:mr_test.py:52 Search using server side sorting using undefined mr in the attr... INFO  tests.suites.syntax.mr_test:mr_test.py:62 Test PASSED
Passed suites/tls/cipher_test.py::test_long_cipher_list 14.17
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/tls/ssl_version_test.py::test_ssl_version_range 59.93
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.suites.tls.ssl_version_test:ssl_version_test.py:36 default min: TLS1.2 max: TLS1.3
Passed suites/tls/tls_cert_namespace_test.py::test_pem_cert_in_private_namespace 8.24
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.suites.tls.tls_cert_namespace_test:tls_cert_namespace_test.py:54 Enable TLS INFO  tests.suites.tls.tls_cert_namespace_test:tls_cert_namespace_test.py:57 Checking PrivateTmp value INFO  tests.suites.tls.tls_cert_namespace_test:tls_cert_namespace_test.py:59 Command used : systemctl show -p PrivateTmp dirsrv@standalone1.service INFO  tests.suites.tls.tls_cert_namespace_test:tls_cert_namespace_test.py:63 Check files in private /tmp INFO  tests.suites.tls.tls_cert_namespace_test:tls_cert_namespace_test.py:68 Check that Self-Signed-CA.pem is present in private /tmp INFO  tests.suites.tls.tls_cert_namespace_test:tls_cert_namespace_test.py:68 Check that Server-Cert-Key.pem is present in private /tmp INFO  tests.suites.tls.tls_cert_namespace_test:tls_cert_namespace_test.py:68 Check that Server-Cert.pem is present in private /tmp INFO  tests.suites.tls.tls_cert_namespace_test:tls_cert_namespace_test.py:71 Check instance cert directory INFO  tests.suites.tls.tls_cert_namespace_test:tls_cert_namespace_test.py:75 Check that Self-Signed-CA.pem is not present in /etc/dirsrv/slapd-standalone1/ directory INFO  tests.suites.tls.tls_cert_namespace_test:tls_cert_namespace_test.py:75 Check that Server-Cert-Key.pem is not present in /etc/dirsrv/slapd-standalone1/ directory INFO  tests.suites.tls.tls_cert_namespace_test:tls_cert_namespace_test.py:75 Check that Server-Cert.pem is not present in /etc/dirsrv/slapd-standalone1/ directory
Passed suites/tls/tls_cert_namespace_test.py::test_cert_category_authority 9.06
-------------------------------Captured log call--------------------------------
INFO  tests.suites.tls.tls_cert_namespace_test:tls_cert_namespace_test.py:106 Enable TLS INFO  tests.suites.tls.tls_cert_namespace_test:tls_cert_namespace_test.py:109 Get certificate path INFO  tests.suites.tls.tls_cert_namespace_test:tls_cert_namespace_test.py:115 Check that Self-Signed-CA.pem is present INFO  tests.suites.tls.tls_cert_namespace_test:tls_cert_namespace_test.py:119 Trust the certificate INFO  tests.suites.tls.tls_cert_namespace_test:tls_cert_namespace_test.py:122 Search if our certificate has category: authority
Passed suites/tls/tls_check_crl_test.py::test_tls_check_crl 8.51
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed suites/tls/tls_ldaps_only_test.py::test_tls_ldaps_only 19.89
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed tickets/ticket47560_test.py::test_ticket47560 17.13
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  lib389:tasks.py:877 fixupMemberOf task fixupmemberof_10282020_233738 for basedn dc=example,dc=com completed successfully
Passed tickets/ticket47573_test.py::test_ticket47573_init 0.03
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for consumer1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39201, 'ldap-secureport': 63901, 'server-id': 'consumer1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:169 Joining consumer consumer1 from master1 ... INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 28e1fa79-c997-4122-919c-b4510a7118dd / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is working INFO  lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 INFO  lib389.topologies:topologies.py:174 Ensuring consumer consumer1 from master1 ... 
INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 already exists
-------------------------------Captured log call--------------------------------
DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:123 test_ticket47573_init topology_m1c1 <lib389.topologies.TopologyMain object at 0x7fd16762ee80> (master <lib389.DirSrv object at 0x7fd16762a8b0>, consumer <lib389.DirSrv object at 0x7fd16762ae50>
Passed tickets/ticket47573_test.py::test_ticket47573_one 1.23
-------------------------------Captured log call--------------------------------
DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:147 test_ticket47573_one topology_m1c1 <lib389.topologies.TopologyMain object at 0x7fd16762ee80> (master <lib389.DirSrv object at 0x7fd16762a8b0>, consumer <lib389.DirSrv object at 0x7fd16762ae50> DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:113 trigger_schema_push: receive 0 (expected 1) DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:161 test_ticket47573_one master_schema_csn=b'5f9a3929000000000000' DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:162 ctest_ticket47573_one onsumer_schema_csn=b'5f9a3929000000000000' DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:47 _pattern_errorlog: start at offset 0 DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [38] 389-Directory/1.4.5.0 B2020.303.0011 DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [95] localhost.localdomain:39001 (/etc/dirsrv/slapd-master1) DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [96] DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [200] [28/Oct/2020:23:37:53.556540904 -0400] - INFO - main - 389-Directory/1.4.5.0 B2020.303.0011 starting up DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [308] [28/Oct/2020:23:37:53.559259155 -0400] - INFO - main - Setting the maximum file descriptor limit to: 524288 DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [416] [28/Oct/2020:23:37:54.329707689 -0400] - INFO - PBKDF2_SHA256 - Based on CPU performance, chose 2048 rounds DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [530] [28/Oct/2020:23:37:54.337345983 -0400] - INFO - bdb_config_upgrade_dse_info - create config entry from old config DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [632] [28/Oct/2020:23:37:54.344941797 -0400] - NOTICE - bdb_start_autotune - found 
7980868k physical memory DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [728] [28/Oct/2020:23:37:54.348530923 -0400] - NOTICE - bdb_start_autotune - found 7309896k available DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [835] [28/Oct/2020:23:37:54.351625254 -0400] - NOTICE - bdb_start_autotune - cache autosizing: db cache: 498804k DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [938] [28/Oct/2020:23:37:54.355586954 -0400] - NOTICE - bdb_start_autotune - total cache size: 408620441 B; DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [1074] [28/Oct/2020:23:37:54.591985544 -0400] - INFO - slapd_daemon - slapd started. Listening on All Interfaces port 39001 for LDAP requests DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [1199] [28/Oct/2020:23:37:54.597757732 -0400] - INFO - slapd_daemon - Listening on /var/run/slapd-master1.socket for LDAPI requests DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [1358] [28/Oct/2020:23:37:54.618405436 -0400] - INFO - postop_modify_config_dse - The change of nsslapd-securePort will not take effect until the server is restarted DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [1475] [28/Oct/2020:23:37:55.368893651 -0400] - INFO - ldbm_instance_config_cachememsize_set - force a minimal value 512000 DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [1653] [28/Oct/2020:23:37:55.825410471 -0400] - INFO - op_thread_cleanup - slapd shutting down - signaling operation threads - op stack size 2 max work q size 2 max work q stack size 2 DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [1783] [28/Oct/2020:23:37:55.832808087 -0400] - INFO - slapd_daemon - slapd shutting down - closing down internal subsystems and plugins DEBUG  
tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [1886] [28/Oct/2020:23:37:55.839769338 -0400] - INFO - bdb_pre_close - Waiting for 4 database threads to stop DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [1983] [28/Oct/2020:23:37:57.050442973 -0400] - INFO - bdb_pre_close - All database threads now stopped DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [2094] [28/Oct/2020:23:37:57.075726730 -0400] - INFO - ldbm_back_instance_set_destructor - Set of instances destroyed DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [2255] [28/Oct/2020:23:37:57.079116488 -0400] - INFO - connection_post_shutdown_cleanup - slapd shutting down - freed 2 work q stack objects - freed 2 op stack objects DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [2325] [28/Oct/2020:23:37:57.081887417 -0400] - INFO - main - slapd stopped. DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [2429] [28/Oct/2020:23:37:58.432887264 -0400] - INFO - main - 389-Directory/1.4.5.0 B2020.303.0011 starting up DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [2537] [28/Oct/2020:23:37:58.444009768 -0400] - INFO - main - Setting the maximum file descriptor limit to: 524288 DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [2645] [28/Oct/2020:23:37:59.238199910 -0400] - INFO - PBKDF2_SHA256 - Based on CPU performance, chose 2048 rounds DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [2762] [28/Oct/2020:23:37:59.243603315 -0400] - INFO - ldbm_instance_config_cachememsize_set - force a minimal value 512000 DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [2864] [28/Oct/2020:23:37:59.249165275 -0400] - NOTICE - bdb_start_autotune - found 7980868k physical memory DEBUG  
tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [2960] [28/Oct/2020:23:37:59.251704503 -0400] - NOTICE - bdb_start_autotune - found 7309508k available DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [3067] [28/Oct/2020:23:37:59.254322615 -0400] - NOTICE - bdb_start_autotune - cache autosizing: db cache: 498804k DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [3197] [28/Oct/2020:23:37:59.257530965 -0400] - NOTICE - bdb_start_autotune - cache autosizing: userRoot entry cache (1 total): 1376256k DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [3323] [28/Oct/2020:23:37:59.261413750 -0400] - NOTICE - bdb_start_autotune - cache autosizing: userRoot dn cache (1 total): 196608k DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [3427] [28/Oct/2020:23:37:59.264523961 -0400] - NOTICE - bdb_start_autotune - total cache size: 1834683801 B; DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [3563] [28/Oct/2020:23:37:59.357133282 -0400] - INFO - slapd_daemon - slapd started. Listening on All Interfaces port 39001 for LDAP requests DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [3688] [28/Oct/2020:23:37:59.360273610 -0400] - INFO - slapd_daemon - Listening on /var/run/slapd-master1.socket for LDAPI requests DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [3876] [28/Oct/2020:23:38:08.811782044 -0400] - NOTICE - NSMMReplicationPlugin - changelog program - _cl5ConstructRUV - Rebuilding the replication changelog RUV, this may take several minutes... DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [4056] [28/Oct/2020:23:38:08.815736985 -0400] - NOTICE - NSMMReplicationPlugin - changelog program - _cl5ConstructRUV - Rebuilding replication changelog RUV complete. 
Result 0 (Success) DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [4244] [28/Oct/2020:23:38:08.819284360 -0400] - NOTICE - NSMMReplicationPlugin - changelog program - _cl5ConstructRUV - Rebuilding the replication changelog RUV, this may take several minutes... DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [4424] [28/Oct/2020:23:38:08.824240539 -0400] - NOTICE - NSMMReplicationPlugin - changelog program - _cl5ConstructRUV - Rebuilding replication changelog RUV complete. Result 0 (Success) DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [4595] [28/Oct/2020:23:38:10.072190666 -0400] - INFO - NSMMReplicationPlugin - repl5_tot_run - Beginning total update of replica "agmt="cn=temp_201" (ci-vm-10-0-136-251:39201)". DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [4779] [28/Oct/2020:23:38:10.075881958 -0400] - NOTICE - NSMMReplicationPlugin - replica_subentry_check - Need to create replication keep alive entry <cn=repl keep alive 1,dc=example,dc=com> DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [4924] [28/Oct/2020:23:38:10.078592494 -0400] - INFO - NSMMReplicationPlugin - replica_subentry_create - add dn: cn=repl keep alive 1,dc=example,dc=com DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [4941] objectclass: top DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [4967] objectclass: ldapsubentry DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [4997] objectclass: extensibleObject DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [5019] cn: repl keep alive 1 DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [5206] [28/Oct/2020:23:38:12.716003752 -0400] - INFO - NSMMReplicationPlugin - repl5_tot_run - Finished total update of replica "agmt="cn=temp_201" 
(ci-vm-10-0-136-251:39201)". Sent 16 entries. DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [5206] DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:59 _pattern_errorlog: end at offset 5206
Passed tickets/ticket47573_test.py::test_ticket47573_two 1.27
-------------------------------Captured log call--------------------------------
DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:113 trigger_schema_push: receive b'1' (expected 2) DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:192 test_ticket47573_two master_schema_csn=b'5f9a392b000000000000' DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:193 test_ticket47573_two consumer_schema_csn=b'5f9a392b000000000000' DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:47 _pattern_errorlog: start at offset 5207 DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [5207] DEBUG  tests.tickets.ticket47573_test:ticket47573_test.py:59 _pattern_errorlog: end at offset 5207
Passed tickets/ticket47573_test.py::test_ticket47573_three 1.01
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket47573_test:ticket47573_test.py:228 Testcase PASSED
Passed tickets/ticket47619_test.py::test_ticket47619_init 7.46
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for consumer1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39201, 'ldap-secureport': 63901, 'server-id': 'consumer1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:169 Joining consumer consumer1 from master1 ... INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect a75eeb14-cd17-48f1-b12a-00b616b9035a / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is working INFO  lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 INFO  lib389.topologies:topologies.py:174 Ensuring consumer consumer1 from master1 ... 
INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 already exists
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47619_test.py:48 test_ticket47619_init topology_m1c1 <lib389.topologies.TopologyMain object at 0x7fd16766edc0> INFO  lib389:ticket47619_test.py:61 test_ticket47619_init: 100 entries ADDed other_entry[0..99]
Passed tickets/ticket47619_test.py::test_ticket47619_create_index 3.45
-------------------------------Captured log call--------------------------------
INFO  lib389:backend.py:80 List backend with suffix=cn=changelog INFO  lib389:backend.py:80 List backend with suffix=cn=changelog INFO  lib389:backend.py:80 List backend with suffix=cn=changelog INFO  lib389:backend.py:80 List backend with suffix=cn=changelog INFO  lib389:backend.py:80 List backend with suffix=cn=changelog INFO  lib389:backend.py:80 List backend with suffix=cn=changelog INFO  lib389:backend.py:80 List backend with suffix=cn=changelog INFO  lib389:backend.py:80 List backend with suffix=cn=changelog
Passed tickets/ticket47619_test.py::test_ticket47619_reindex 16.36
-------------------------------Captured log call--------------------------------
INFO  lib389:tasks.py:798 Index task index_attrs_10282020_233901 completed successfully INFO  lib389:tasks.py:798 Index task index_attrs_10282020_233903 completed successfully INFO  lib389:tasks.py:798 Index task index_attrs_10282020_233905 completed successfully INFO  lib389:tasks.py:798 Index task index_attrs_10282020_233907 completed successfully INFO  lib389:tasks.py:798 Index task index_attrs_10282020_233909 completed successfully INFO  lib389:tasks.py:798 Index task index_attrs_10282020_233911 completed successfully INFO  lib389:tasks.py:798 Index task index_attrs_10282020_233913 completed successfully INFO  lib389:tasks.py:798 Index task index_attrs_10282020_233915 completed successfully
Passed tickets/ticket47619_test.py::test_ticket47619_check_indexed_search 0.10
No log output captured.
Passed tickets/ticket47640_test.py::test_ticket47640 0.33
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket47640_test:ticket47640_test.py:65 Add operation correctly rejected. INFO  tests.tickets.ticket47640_test:ticket47640_test.py:75 Test complete
Passed tickets/ticket47653MMR_test.py::test_ticket47653_init 0.21
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect b7cc36bd-2954-42d8-be42-893d30da91ca / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working 
(expect 6414a431-450a-4560-b835-7c10e42c7667 / got description=b7cc36bd-2954-42d8-be42-893d30da91ca) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47653MMR_test.py:72 Add OCticket47653 that allows 'member' attribute INFO  lib389:ticket47653MMR_test.py:77 Add cn=bind_entry, dc=example,dc=com
Passed tickets/ticket47653MMR_test.py::test_ticket47653_add 5.25
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47653MMR_test.py:114 ######################### ADD ###################### INFO  lib389:ticket47653MMR_test.py:117 Bind as cn=bind_entry, dc=example,dc=com INFO  lib389:ticket47653MMR_test.py:147 Try to add Add cn=test_entry, dc=example,dc=com (aci is missing): dn: cn=test_entry, dc=example,dc=com cn: test_entry member: cn=bind_entry, dc=example,dc=com objectclass: top objectclass: person objectclass: OCticket47653 postalAddress: here postalCode: 1234 sn: test_entry INFO  lib389:ticket47653MMR_test.py:151 Exception (expected): INSUFFICIENT_ACCESS INFO  lib389:ticket47653MMR_test.py:155 Bind as cn=Directory Manager and add the ADD SELFDN aci INFO  lib389:ticket47653MMR_test.py:168 Bind as cn=bind_entry, dc=example,dc=com INFO  lib389:ticket47653MMR_test.py:173 Try to add Add cn=test_entry, dc=example,dc=com (member is missing) INFO  lib389:ticket47653MMR_test.py:181 Exception (expected): INSUFFICIENT_ACCESS INFO  lib389:ticket47653MMR_test.py:188 Try to add Add cn=test_entry, dc=example,dc=com (with several member values) INFO  lib389:ticket47653MMR_test.py:191 Exception (expected): INSUFFICIENT_ACCESS INFO  lib389:ticket47653MMR_test.py:195 Try to add Add cn=test_entry, dc=example,dc=com should be successful INFO  lib389:ticket47653MMR_test.py:206 Try to retrieve cn=test_entry, dc=example,dc=com from Master2 INFO  lib389:ticket47653MMR_test.py:218 Update cn=test_entry, dc=example,dc=com on M2
Passed tickets/ticket47653MMR_test.py::test_ticket47653_modify 4.21
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47653MMR_test.py:248 Bind as cn=bind_entry, dc=example,dc=com INFO  lib389:ticket47653MMR_test.py:251 ######################### MODIFY ###################### INFO  lib389:ticket47653MMR_test.py:255 Try to modify cn=test_entry, dc=example,dc=com (aci is missing) INFO  lib389:ticket47653MMR_test.py:259 Exception (expected): INSUFFICIENT_ACCESS INFO  lib389:ticket47653MMR_test.py:263 Bind as cn=Directory Manager and add the WRITE SELFDN aci INFO  lib389:ticket47653MMR_test.py:277 M1: Bind as cn=bind_entry, dc=example,dc=com INFO  lib389:ticket47653MMR_test.py:282 M1: Try to modify cn=test_entry, dc=example,dc=com. It should succeeds INFO  lib389:ticket47653MMR_test.py:286 M1: Bind as cn=Directory Manager INFO  lib389:ticket47653MMR_test.py:289 M1: Check the update of cn=test_entry, dc=example,dc=com INFO  lib389:ticket47653MMR_test.py:295 M2: Bind as cn=Directory Manager INFO  lib389:ticket47653MMR_test.py:297 M2: Try to retrieve cn=test_entry, dc=example,dc=com INFO  lib389:ticket47653MMR_test.py:311 M2: Update cn=test_entry, dc=example,dc=com (bound as cn=bind_entry, dc=example,dc=com) INFO  lib389:ticket47653MMR_test.py:329 M1: Bind as cn=Directory Manager INFO  lib389:ticket47653MMR_test.py:331 M1: Check cn=test_entry, dc=example,dc=com.postalCode=1929)
Passed tickets/ticket47676_test.py::test_ticket47676_init 0.53
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 113368a0-3e2e-4aff-b096-96fbb88f1eaf / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working 
(expect b7ed1a72-f853-4e1c-9a2e-7f779b97af49 / got description=113368a0-3e2e-4aff-b096-96fbb88f1eaf) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47676_test.py:88 Add OCticket47676 that allows 'member' attribute INFO  lib389:ticket47676_test.py:93 Add cn=bind_entry, dc=example,dc=com
Passed tickets/ticket47676_test.py::test_ticket47676_skip_oc_at 3.74
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47676_test.py:120 ######################### ADD ###################### INFO  lib389:ticket47676_test.py:123 Bind as cn=Directory Manager and add the add the entry with specific oc INFO  lib389:ticket47676_test.py:140 Try to add Add cn=test_entry, dc=example,dc=com should be successful INFO  lib389:ticket47676_test.py:147 Try to retrieve cn=test_entry, dc=example,dc=com from Master2 INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 95b69a6b-0d3c-46f3-86a2-5f94a36fa42b / got description=b7ed1a72-f853-4e1c-9a2e-7f779b97af49) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389:ticket47676_test.py:152 Update cn=test_entry, dc=example,dc=com on M2 INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 4a7f5b5e-6106-49ed-97d5-608cab3f2bb9 / got description=95b69a6b-0d3c-46f3-86a2-5f94a36fa42b) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working
Passed tickets/ticket47676_test.py::test_ticket47676_reject_action 12.00
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47676_test.py:163 ######################### REJECT ACTION ###################### INFO  lib389:ticket47676_test.py:177 Add OC2ticket47676 on M1 INFO  lib389:ticket47676_test.py:182 Check OC2ticket47676 is in M1 INFO  lib389:ticket47676_test.py:193 Update cn=test_entry, dc=example,dc=com on M1 INFO  lib389:ticket47676_test.py:198 Check updated cn=test_entry, dc=example,dc=com on M2 INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 2c32397d-47e6-4935-a3fa-9436d44ab3ba / got description=4a7f5b5e-6106-49ed-97d5-608cab3f2bb9) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389:ticket47676_test.py:205 Check OC2ticket47676 is not in M2 INFO  lib389:ticket47676_test.py:215 ######################### NO MORE REJECT ACTION ###################### INFO  lib389:ticket47676_test.py:226 Update cn=test_entry, dc=example,dc=com on M1 INFO  lib389:ticket47676_test.py:231 Check updated cn=test_entry, dc=example,dc=com on M2 INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c67d259f-eb30-4e31-a655-42f1fd905f37 / got description=2c32397d-47e6-4935-a3fa-9436d44ab3ba) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389:ticket47676_test.py:237 Check OC2ticket47676 is in M2
Passed tickets/ticket47714_test.py::test_ticket47714_init 0.07
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47714_test.py:38 ############################################### INFO  lib389:ticket47714_test.py:39 ####### INFO  lib389:ticket47714_test.py:40 ####### Testing Ticket 47714 - [RFE] Update lastLoginTime also in Account Policy plugin if account lockout is based on passwordExpirationTime. INFO  lib389:ticket47714_test.py:41 ####### INFO  lib389:ticket47714_test.py:42 ############################################### INFO  lib389.utils:ticket47714_test.py:55 ######################### Adding Account Policy entry: cn=Account Inactivation Policy,dc=example,dc=com ###################### INFO  lib389.utils:ticket47714_test.py:60 ######################### Adding Test User entry: uid=ticket47714user,dc=example,dc=com ######################
Passed tickets/ticket47714_test.py::test_ticket47714_run_0 10.44
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47714_test.py:38 ############################################### INFO  lib389:ticket47714_test.py:39 ####### INFO  lib389:ticket47714_test.py:40 ####### Account Policy - No new attr alwaysRecordLoginAttr in config INFO  lib389:ticket47714_test.py:41 ####### INFO  lib389:ticket47714_test.py:42 ############################################### INFO  lib389.utils:ticket47714_test.py:96 ######################### Bind as uid=ticket47714user,dc=example,dc=com ###################### INFO  lib389.utils:ticket47714_test.py:109 ######################### Bind as uid=ticket47714user,dc=example,dc=com again ###################### INFO  lib389.utils:ticket47714_test.py:122 First lastLoginTime: b'20201029034115Z', Second lastLoginTime: b'20201029034117Z' INFO  lib389.utils:ticket47714_test.py:133 ######################### cn=config,cn=Account Policy Plugin,cn=plugins,cn=config ###################### INFO  lib389.utils:ticket47714_test.py:134 accountInactivityLimit: b'1' INFO  lib389.utils:ticket47714_test.py:135 ######################### cn=config,cn=Account Policy Plugin,cn=plugins,cn=config DONE ###################### INFO  lib389.utils:ticket47714_test.py:137 ######################### Bind as uid=ticket47714user,dc=example,dc=com again to fail ###################### INFO  lib389.utils:ticket47714_test.py:141 CONSTRAINT VIOLATION Constraint violation INFO  lib389.utils:ticket47714_test.py:142 uid=ticket47714user,dc=example,dc=com was successfully inactivated.
Passed tickets/ticket47714_test.py::test_ticket47714_run_1 5.49
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47714_test.py:38 ############################################### INFO  lib389:ticket47714_test.py:39 ####### INFO  lib389:ticket47714_test.py:40 ####### Account Policy - With new attr alwaysRecordLoginAttr in config INFO  lib389:ticket47714_test.py:41 ####### INFO  lib389:ticket47714_test.py:42 ############################################### INFO  lib389.utils:ticket47714_test.py:179 ######################### Bind as uid=ticket47714user,dc=example,dc=com ###################### INFO  lib389.utils:ticket47714_test.py:191 ######################### Bind as uid=ticket47714user,dc=example,dc=com again ###################### INFO  lib389.utils:ticket47714_test.py:203 First lastLoginTime: b'20201029034125Z', Second lastLoginTime: b'20201029034126Z' INFO  lib389:ticket47714_test.py:206 ticket47714 was successfully verified.
Passed tickets/ticket47721_test.py::test_ticket47721_init 0.91
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 9d9e8c80-b6f7-49a5-bc2c-819f585ee790 / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working 
(expect 0278a0cd-fe99-4b2f-bed4-38ecfbb7faa1 / got description=9d9e8c80-b6f7-49a5-bc2c-819f585ee790) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47721_test.py:98 Add cn=bind_entry, dc=example,dc=com
Passed tickets/ticket47721_test.py::test_ticket47721_0 1.22
-------------------------------Captured log call--------------------------------
INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 7a098732-e73c-4fbc-8e54-21722cdd032b / got description=0278a0cd-fe99-4b2f-bed4-38ecfbb7faa1) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working
Passed tickets/ticket47721_test.py::test_ticket47721_1 3.55
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket47721_test:ticket47721_test.py:127 Running test 1... INFO  lib389:ticket47721_test.py:132 Add (M2) b"( ATticket47721-oid NAME 'ATticket47721' DESC 'test AT ticket 47721' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 X-ORIGIN ( 'Test 47721' 'user defined' ) )" INFO  lib389:ticket47721_test.py:136 Chg (M2) b"( 2.16.840.1.113730.3.1.569 NAME 'cosPriority' DESC 'Netscape defined attribute type' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 X-ORIGIN 'Netscape Directory Server' )" INFO  lib389:ticket47721_test.py:140 Add (M2) b"( OCticket47721-oid NAME 'OCticket47721' DESC 'An group of related automount objects' SUP top STRUCTURAL MUST ou X-ORIGIN 'draft-howard-rfc2307bis' )" INFO  lib389:ticket47721_test.py:144 Chg (M2) b"( 5.3.6.1.1.1.2.0 NAME 'trustAccount' DESC 'Sets trust accounts information' SUP top AUXILIARY MUST trustModel MAY ( accessTo $ ou ) X-ORIGIN 'nss_ldap/pam_ldap' )" INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect cc544efc-8599-4004-8933-ed4a5b748132 / got description=7a098732-e73c-4fbc-8e54-21722cdd032b) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working DEBUG  tests.tickets.ticket47721_test:ticket47721_test.py:158 Master 1 schemaCSN: b'5f9a3a08000000000000' DEBUG  tests.tickets.ticket47721_test:ticket47721_test.py:159 Master 2 schemaCSN: b'5f9a3a08000000000000'
Passed tickets/ticket47721_test.py::test_ticket47721_2 3.25
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket47721_test:ticket47721_test.py:163 Running test 2... INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect cb68c05b-08e8-4946-a0b5-9a83330a5a28 / got description=cc544efc-8599-4004-8933-ed4a5b748132) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working DEBUG  tests.tickets.ticket47721_test:ticket47721_test.py:176 Master 1 schemaCSN: b'5f9a3a08000000000000' DEBUG  tests.tickets.ticket47721_test:ticket47721_test.py:177 Master 2 schemaCSN: b'5f9a3a08000000000000'
Passed tickets/ticket47721_test.py::test_ticket47721_3 13.83
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket47721_test:ticket47721_test.py:195 Running test 3... INFO  lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:ticket47721_test.py:203 Update schema (M2) b"( ATtest3-oid NAME 'ATtest3' DESC 'test AT ticket 47721' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 X-ORIGIN ( 'Test 47721' 'user defined' ) )" INFO  lib389:ticket47721_test.py:208 Update schema (M2) b"( OCtest3-oid NAME 'OCtest3' DESC 'An group of related automount objects' SUP top STRUCTURAL MUST ou X-ORIGIN 'draft-howard-rfc2307bis' )" INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect e7a866c2-8ac0-4ac5-8811-4aa77540362a / got description=cb68c05b-08e8-4946-a0b5-9a83330a5a28) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working DEBUG  tests.tickets.ticket47721_test:ticket47721_test.py:223 Master 1 schemaCSN: b'5f9a3a08000000000000' DEBUG  tests.tickets.ticket47721_test:ticket47721_test.py:224 Master 2 schemaCSN: b'5f9a3a15000000000000'
Passed tickets/ticket47721_test.py::test_ticket47721_4 4.60
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket47721_test:ticket47721_test.py:245 Running test 4... INFO  lib389:ticket47721_test.py:248 Update schema (M1) b"( ATtest4-oid NAME 'ATtest4' DESC 'test AT ticket 47721' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 X-ORIGIN ( 'Test 47721' 'user defined' ) )" INFO  lib389:ticket47721_test.py:252 Update schema (M1) b"( OCtest4-oid NAME 'OCtest4' DESC 'An group of related automount objects' SUP top STRUCTURAL MUST ou X-ORIGIN 'draft-howard-rfc2307bis' )" INFO  lib389:ticket47721_test.py:255 trigger replication M1->M2: to update the schema INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 63b8aac6-f3f6-40c1-a980-17b1f75fdee9 / got description=e7a866c2-8ac0-4ac5-8811-4aa77540362a) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389:ticket47721_test.py:264 trigger replication M1->M2: to push the schema INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 051134eb-bb6e-45fd-a2df-29cc8892c463 / got description=63b8aac6-f3f6-40c1-a980-17b1f75fdee9) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working DEBUG  tests.tickets.ticket47721_test:ticket47721_test.py:276 Master 1 schemaCSN: b'5f9a3a1d000000000000' DEBUG  tests.tickets.ticket47721_test:ticket47721_test.py:277 Master 2 schemaCSN: b'5f9a3a1d000000000000'
Passed tickets/ticket47787_test.py::test_ticket47787_init 2.37
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 902d1d42-cec9-4e82-badc-1ebd4c10cedc / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working 
(expect 7d2b3c95-ba57-4cae-a024-20cf3ead1c50 / got description=902d1d42-cec9-4e82-badc-1ebd4c10cedc) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47787_test.py:294 ######################### INITIALIZATION ###################### INFO  lib389:ticket47787_test.py:297 Add cn=bind_entry,dc=example,dc=com INFO  lib389:ticket47787_test.py:305 Add cn=staged user,dc=example,dc=com INFO  lib389:ticket47787_test.py:312 Add cn=accounts,dc=example,dc=com
Passed tickets/ticket47787_test.py::test_ticket47787_2 16.82
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47787_test.py:69 ############################################### INFO  lib389:ticket47787_test.py:70 ####### INFO  lib389:ticket47787_test.py:71 ####### test_ticket47787_2 INFO  lib389:ticket47787_test.py:72 ####### INFO  lib389:ticket47787_test.py:73 ############################################### INFO  lib389:ticket47787_test.py:59 Bind as cn=Directory Manager INFO  lib389:ticket47787_test.py:59 Bind as cn=Directory Manager INFO  lib389:ticket47787_test.py:159 ######################### Pause RA M1<->M2 ###################### INFO  lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:ticket47787_test.py:209 ######################### MOD cn=new_account18,cn=staged user,dc=example,dc=com (M2) ###################### INFO  lib389:ticket47787_test.py:200 ######################### DELETE new_account1 (M1) ###################### INFO  lib389:ticket47787_test.py:209 ######################### MOD cn=new_account18,cn=staged user,dc=example,dc=com (M2) ###################### INFO  lib389:ticket47787_test.py:209 ######################### MOD cn=new_account19,cn=staged user,dc=example,dc=com (M2) ###################### INFO  lib389:ticket47787_test.py:209 ######################### MOD cn=new_account1,cn=staged user,dc=example,dc=com (M2) ###################### INFO  lib389:ticket47787_test.py:209 ######################### MOD cn=new_account19,cn=staged user,dc=example,dc=com (M2) ###################### INFO  lib389:ticket47787_test.py:170 ######################### resume RA M1<->M2 ###################### INFO  lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  
lib389:ticket47787_test.py:388 ######################### Check DEL replicated on M2 ###################### INFO  lib389:ticket47787_test.py:79 ######################### Tombstone on M1 ###################### INFO  lib389:ticket47787_test.py:92 ######################### Tombstone on M2 ###################### INFO  lib389:ticket47787_test.py:96 ######################### Description ###################### DEL M1 - MOD M2 INFO  lib389:ticket47787_test.py:97 M1 only INFO  lib389:ticket47787_test.py:108 M2 only INFO  lib389:ticket47787_test.py:119 M1 differs M2 INFO  lib389:ticket47787_test.py:409 ######################### Check MOD replicated on M1 ######################
Passed tickets/ticket47808_test.py::test_ticket47808_run 3.53
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47808_test.py:36 Bind as cn=Directory Manager INFO  lib389:ticket47808_test.py:39 ######################### SETUP ATTR UNIQ PLUGIN ###################### INFO  lib389:ticket47808_test.py:46 ######################### ADD USER 1 ###################### INFO  lib389:ticket47808_test.py:55 Try to add Add dn: cn=test_entry 1, dc=example,dc=com cn: test_entry 1 objectclass: top objectclass: person sn: test_entry : dn: cn=test_entry 1, dc=example,dc=com cn: test_entry 1 objectclass: top objectclass: person sn: test_entry INFO  lib389:ticket47808_test.py:58 ######################### Restart Server ###################### INFO  lib389:ticket47808_test.py:62 ######################### ADD USER 2 ###################### INFO  lib389:ticket47808_test.py:71 Try to add Add dn: cn=test_entry 2, dc=example,dc=com cn: test_entry 2 objectclass: top objectclass: person sn: test_entry : dn: cn=test_entry 2, dc=example,dc=com cn: test_entry 2 objectclass: top objectclass: person sn: test_entry WARNING  lib389:ticket47808_test.py:75 Adding cn=test_entry 2, dc=example,dc=com failed INFO  lib389:ticket47808_test.py:78 ######################### IS SERVER UP? ###################### INFO  lib389:ticket47808_test.py:81 Yes, it's up. INFO  lib389:ticket47808_test.py:83 ######################### CHECK USER 2 NOT ADDED ###################### INFO  lib389:ticket47808_test.py:84 Try to search cn=test_entry 2, dc=example,dc=com INFO  lib389:ticket47808_test.py:88 Found none INFO  lib389:ticket47808_test.py:90 ######################### DELETE USER 1 ###################### INFO  lib389:ticket47808_test.py:92 Try to delete cn=test_entry 1, dc=example,dc=com
Passed tickets/ticket47823_test.py::test_ticket47823_init 11.90
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed tickets/ticket47823_test.py::test_ticket47823_one_container_add 18.00
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47823_test.py:58 ############################################### INFO  lib389:ticket47823_test.py:59 ####### INFO  lib389:ticket47823_test.py:60 ####### With former config (args), check attribute uniqueness with 'cn' (ADD) INFO  lib389:ticket47823_test.py:61 ####### INFO  lib389:ticket47823_test.py:62 ############################################### INFO  lib389:ticket47823_test.py:140 Uniqueness not enforced: create the entries INFO  lib389:ticket47823_test.py:155 Uniqueness enforced: checks second entry is rejected INFO  lib389:ticket47823_test.py:58 ############################################### INFO  lib389:ticket47823_test.py:59 ####### INFO  lib389:ticket47823_test.py:60 ####### With new config (args), check attribute uniqueness with 'cn' (ADD) INFO  lib389:ticket47823_test.py:61 ####### INFO  lib389:ticket47823_test.py:62 ############################################### INFO  lib389:ticket47823_test.py:140 Uniqueness not enforced: create the entries INFO  lib389:ticket47823_test.py:155 Uniqueness enforced: checks second entry is rejected
Passed tickets/ticket47823_test.py::test_ticket47823_one_container_mod 9.24
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47823_test.py:58 ############################################### INFO  lib389:ticket47823_test.py:59 ####### INFO  lib389:ticket47823_test.py:60 ####### With former config (args), check attribute uniqueness with 'cn' (MOD) INFO  lib389:ticket47823_test.py:61 ####### INFO  lib389:ticket47823_test.py:62 ############################################### INFO  lib389:ticket47823_test.py:193 Uniqueness enforced: checks MOD ADD entry is rejected INFO  lib389:ticket47823_test.py:210 Uniqueness enforced: checks MOD REPLACE entry is rejected INFO  lib389:ticket47823_test.py:58 ############################################### INFO  lib389:ticket47823_test.py:59 ####### INFO  lib389:ticket47823_test.py:60 ####### With new config (args), check attribute uniqueness with 'cn' (MOD) INFO  lib389:ticket47823_test.py:61 ####### INFO  lib389:ticket47823_test.py:62 ############################################### INFO  lib389:ticket47823_test.py:193 Uniqueness enforced: checks MOD ADD entry is rejected INFO  lib389:ticket47823_test.py:210 Uniqueness enforced: checks MOD REPLACE entry is rejected
Passed tickets/ticket47823_test.py::test_ticket47823_one_container_modrdn 9.41
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47823_test.py:58 ############################################### INFO  lib389:ticket47823_test.py:59 ####### INFO  lib389:ticket47823_test.py:60 ####### With former config (args), check attribute uniqueness with 'cn' (MODRDN) INFO  lib389:ticket47823_test.py:61 ####### INFO  lib389:ticket47823_test.py:62 ############################################### INFO  lib389:ticket47823_test.py:237 Uniqueness enforced: checks MODRDN entry is rejected INFO  lib389:ticket47823_test.py:58 ############################################### INFO  lib389:ticket47823_test.py:59 ####### INFO  lib389:ticket47823_test.py:60 ####### With former config (args), check attribute uniqueness with 'cn' (MODRDN) INFO  lib389:ticket47823_test.py:61 ####### INFO  lib389:ticket47823_test.py:62 ############################################### INFO  lib389:ticket47823_test.py:237 Uniqueness enforced: checks MODRDN entry is rejected
Passed tickets/ticket47823_test.py::test_ticket47823_multi_containers_add 9.10
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47823_test.py:58 ############################################### INFO  lib389:ticket47823_test.py:59 ####### INFO  lib389:ticket47823_test.py:60 ####### With former config (args), check attribute uniqueness with 'cn' (ADD) INFO  lib389:ticket47823_test.py:61 ####### INFO  lib389:ticket47823_test.py:62 ############################################### INFO  lib389:ticket47823_test.py:58 ############################################### INFO  lib389:ticket47823_test.py:59 ####### INFO  lib389:ticket47823_test.py:60 ####### With new config (args), check attribute uniqueness with 'cn' (ADD) INFO  lib389:ticket47823_test.py:61 ####### INFO  lib389:ticket47823_test.py:62 ###############################################
Passed tickets/ticket47823_test.py::test_ticket47823_multi_containers_mod 9.85
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47823_test.py:58 ############################################### INFO  lib389:ticket47823_test.py:59 ####### INFO  lib389:ticket47823_test.py:60 ####### With former config (args), check attribute uniqueness with 'cn' (MOD) on separated container INFO  lib389:ticket47823_test.py:61 ####### INFO  lib389:ticket47823_test.py:62 ############################################### INFO  lib389:ticket47823_test.py:525 Uniqueness not enforced: if same 'cn' modified (add/replace) on separated containers INFO  lib389:ticket47823_test.py:58 ############################################### INFO  lib389:ticket47823_test.py:59 ####### INFO  lib389:ticket47823_test.py:60 ####### With new config (args), check attribute uniqueness with 'cn' (MOD) on separated container INFO  lib389:ticket47823_test.py:61 ####### INFO  lib389:ticket47823_test.py:62 ############################################### INFO  lib389:ticket47823_test.py:531 Uniqueness not enforced: if same 'cn' modified (add/replace) on separated containers
Passed tickets/ticket47823_test.py::test_ticket47823_multi_containers_modrdn 8.41
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47823_test.py:58 ############################################### INFO  lib389:ticket47823_test.py:59 ####### INFO  lib389:ticket47823_test.py:60 ####### With former config (args), check attribute uniqueness with 'cn' (MODRDN) on separated containers INFO  lib389:ticket47823_test.py:61 ####### INFO  lib389:ticket47823_test.py:62 ############################################### INFO  lib389:ticket47823_test.py:545 Uniqueness not enforced: checks MODRDN entry is accepted on separated containers INFO  lib389:ticket47823_test.py:548 Uniqueness not enforced: checks MODRDN entry is accepted on separated containers
Passed tickets/ticket47823_test.py::test_ticket47823_across_multi_containers_add 5.02
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47823_test.py:58 ############################################### INFO  lib389:ticket47823_test.py:59 ####### INFO  lib389:ticket47823_test.py:60 ####### With new config (args), check attribute uniqueness with 'cn' (ADD) across several containers INFO  lib389:ticket47823_test.py:61 ####### INFO  lib389:ticket47823_test.py:62 ###############################################
Passed tickets/ticket47823_test.py::test_ticket47823_across_multi_containers_mod 4.31
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47823_test.py:58 ############################################### INFO  lib389:ticket47823_test.py:59 ####### INFO  lib389:ticket47823_test.py:60 ####### With new config (args), check attribute uniqueness with 'cn' (MOD) across several containers INFO  lib389:ticket47823_test.py:61 ####### INFO  lib389:ticket47823_test.py:62 ###############################################
Passed tickets/ticket47823_test.py::test_ticket47823_across_multi_containers_modrdn 4.34
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47823_test.py:58 ############################################### INFO  lib389:ticket47823_test.py:59 ####### INFO  lib389:ticket47823_test.py:60 ####### With new config (args), check attribute uniqueness with 'cn' (MODRDN) across several containers INFO  lib389:ticket47823_test.py:61 ####### INFO  lib389:ticket47823_test.py:62 ###############################################
Passed tickets/ticket47823_test.py::test_ticket47823_invalid_config_1 9.76
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47823_test.py:58 ############################################### INFO  lib389:ticket47823_test.py:59 ####### INFO  lib389:ticket47823_test.py:60 ####### Invalid config (old): arg0 is missing INFO  lib389:ticket47823_test.py:61 ####### INFO  lib389:ticket47823_test.py:62 ###############################################
Passed tickets/ticket47823_test.py::test_ticket47823_invalid_config_2 9.12
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47823_test.py:58 ############################################### INFO  lib389:ticket47823_test.py:59 ####### INFO  lib389:ticket47823_test.py:60 ####### Invalid config (old): arg1 is missing INFO  lib389:ticket47823_test.py:61 ####### INFO  lib389:ticket47823_test.py:62 ###############################################
Passed tickets/ticket47823_test.py::test_ticket47823_invalid_config_3 10.18
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47823_test.py:58 ############################################### INFO  lib389:ticket47823_test.py:59 ####### INFO  lib389:ticket47823_test.py:60 ####### Invalid config (old): arg0 is missing but new config attrname exists INFO  lib389:ticket47823_test.py:61 ####### INFO  lib389:ticket47823_test.py:62 ###############################################
Passed tickets/ticket47823_test.py::test_ticket47823_invalid_config_4 9.40
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47823_test.py:58 ############################################### INFO  lib389:ticket47823_test.py:59 ####### INFO  lib389:ticket47823_test.py:60 ####### Invalid config (old): arg1 is missing but new config exist INFO  lib389:ticket47823_test.py:61 ####### INFO  lib389:ticket47823_test.py:62 ###############################################
Passed tickets/ticket47823_test.py::test_ticket47823_invalid_config_5 9.56
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47823_test.py:58 ############################################### INFO  lib389:ticket47823_test.py:59 ####### INFO  lib389:ticket47823_test.py:60 ####### Invalid config (new): uniqueness-attribute-name is missing INFO  lib389:ticket47823_test.py:61 ####### INFO  lib389:ticket47823_test.py:62 ###############################################
Passed tickets/ticket47823_test.py::test_ticket47823_invalid_config_6 9.33
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47823_test.py:58 ############################################### INFO  lib389:ticket47823_test.py:59 ####### INFO  lib389:ticket47823_test.py:60 ####### Invalid config (new): uniqueness-subtrees is missing INFO  lib389:ticket47823_test.py:61 ####### INFO  lib389:ticket47823_test.py:62 ###############################################
Passed tickets/ticket47823_test.py::test_ticket47823_invalid_config_7 10.20
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47823_test.py:58 ############################################### INFO  lib389:ticket47823_test.py:59 ####### INFO  lib389:ticket47823_test.py:60 ####### Invalid config (new): uniqueness-subtrees are invalid INFO  lib389:ticket47823_test.py:61 ####### INFO  lib389:ticket47823_test.py:62 ###############################################
Passed tickets/ticket47828_test.py::test_ticket47828_init 4.47
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed tickets/ticket47828_test.py::test_ticket47828_run_0 0.04
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47828_test.py:42 ############################################### INFO  lib389:ticket47828_test.py:43 ####### INFO  lib389:ticket47828_test.py:44 ####### NO exclude scope: Add an active entry and check its ALLOCATED_ATTR is set INFO  lib389:ticket47828_test.py:45 ####### INFO  lib389:ticket47828_test.py:46 ###############################################
Passed tickets/ticket47828_test.py::test_ticket47828_run_1 0.01
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47828_test.py:42 ############################################### INFO  lib389:ticket47828_test.py:43 ####### INFO  lib389:ticket47828_test.py:44 ####### NO exclude scope: Add an active entry and check its ALLOCATED_ATTR is unchanged (!= magic) INFO  lib389:ticket47828_test.py:45 ####### INFO  lib389:ticket47828_test.py:46 ###############################################
Passed tickets/ticket47828_test.py::test_ticket47828_run_2 0.06
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47828_test.py:42 ############################################### INFO  lib389:ticket47828_test.py:43 ####### INFO  lib389:ticket47828_test.py:44 ####### NO exclude scope: Add a staged entry and check its ALLOCATED_ATTR is set INFO  lib389:ticket47828_test.py:45 ####### INFO  lib389:ticket47828_test.py:46 ###############################################
Passed tickets/ticket47828_test.py::test_ticket47828_run_3 0.26
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47828_test.py:42 ############################################### INFO  lib389:ticket47828_test.py:43 ####### INFO  lib389:ticket47828_test.py:44 ####### NO exclude scope: Add a staged entry and check its ALLOCATED_ATTR is unchanged (!= magic) INFO  lib389:ticket47828_test.py:45 ####### INFO  lib389:ticket47828_test.py:46 ###############################################
Passed tickets/ticket47828_test.py::test_ticket47828_run_4 0.13
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47828_test.py:42 ############################################### INFO  lib389:ticket47828_test.py:43 ####### INFO  lib389:ticket47828_test.py:44 ####### Exclude the provisioning container INFO  lib389:ticket47828_test.py:45 ####### INFO  lib389:ticket47828_test.py:46 ###############################################
Passed tickets/ticket47828_test.py::test_ticket47828_run_5 0.09
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47828_test.py:42 ############################################### INFO  lib389:ticket47828_test.py:43 ####### INFO  lib389:ticket47828_test.py:44 ####### Provisioning excluded scope: Add an active entry and check its ALLOCATED_ATTR is set INFO  lib389:ticket47828_test.py:45 ####### INFO  lib389:ticket47828_test.py:46 ###############################################
Passed tickets/ticket47828_test.py::test_ticket47828_run_6 0.05
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47828_test.py:42 ############################################### INFO  lib389:ticket47828_test.py:43 ####### INFO  lib389:ticket47828_test.py:44 ####### Provisioning excluded scope: Add an active entry and check its ALLOCATED_ATTR is unchanged (!= magic) INFO  lib389:ticket47828_test.py:45 ####### INFO  lib389:ticket47828_test.py:46 ###############################################
Passed tickets/ticket47828_test.py::test_ticket47828_run_7 0.01
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47828_test.py:42 ############################################### INFO  lib389:ticket47828_test.py:43 ####### INFO  lib389:ticket47828_test.py:44 ####### Provisioning excluded scope: Add a staged entry and check its ALLOCATED_ATTR is not set INFO  lib389:ticket47828_test.py:45 ####### INFO  lib389:ticket47828_test.py:46 ###############################################
Passed tickets/ticket47828_test.py::test_ticket47828_run_8 0.01
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47828_test.py:42 ############################################### INFO  lib389:ticket47828_test.py:43 ####### INFO  lib389:ticket47828_test.py:44 ####### Provisioning excluded scope: Add a staged entry and check its ALLOCATED_ATTR is unchanged (!= magic) INFO  lib389:ticket47828_test.py:45 ####### INFO  lib389:ticket47828_test.py:46 ###############################################
Passed tickets/ticket47828_test.py::test_ticket47828_run_9 0.02
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47828_test.py:42 ############################################### INFO  lib389:ticket47828_test.py:43 ####### INFO  lib389:ticket47828_test.py:44 ####### Provisioning excluded scope: Add an dummy entry and check its ALLOCATED_ATTR is set INFO  lib389:ticket47828_test.py:45 ####### INFO  lib389:ticket47828_test.py:46 ###############################################
Passed tickets/ticket47828_test.py::test_ticket47828_run_10 0.01
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47828_test.py:42 ############################################### INFO  lib389:ticket47828_test.py:43 ####### INFO  lib389:ticket47828_test.py:44 ####### Provisioning excluded scope: Add an dummy entry and check its ALLOCATED_ATTR is unchanged (!= magic) INFO  lib389:ticket47828_test.py:45 ####### INFO  lib389:ticket47828_test.py:46 ###############################################
Passed tickets/ticket47828_test.py::test_ticket47828_run_11 0.01
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47828_test.py:42 ############################################### INFO  lib389:ticket47828_test.py:43 ####### INFO  lib389:ticket47828_test.py:44 ####### Exclude (in addition) the dummy container INFO  lib389:ticket47828_test.py:45 ####### INFO  lib389:ticket47828_test.py:46 ###############################################
Passed tickets/ticket47828_test.py::test_ticket47828_run_12 0.02
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47828_test.py:42 ############################################### INFO  lib389:ticket47828_test.py:43 ####### INFO  lib389:ticket47828_test.py:44 ####### Provisioning/Dummy excluded scope: Add an active entry and check its ALLOCATED_ATTR is set INFO  lib389:ticket47828_test.py:45 ####### INFO  lib389:ticket47828_test.py:46 ###############################################
Passed tickets/ticket47828_test.py::test_ticket47828_run_13 0.02
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47828_test.py:42 ############################################### INFO  lib389:ticket47828_test.py:43 ####### INFO  lib389:ticket47828_test.py:44 ####### Provisioning/Dummy excluded scope: Add an active entry and check its ALLOCATED_ATTR is unchanged (!= magic) INFO  lib389:ticket47828_test.py:45 ####### INFO  lib389:ticket47828_test.py:46 ###############################################
Passed tickets/ticket47828_test.py::test_ticket47828_run_14 0.01
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47828_test.py:42 ############################################### INFO  lib389:ticket47828_test.py:43 ####### INFO  lib389:ticket47828_test.py:44 ####### Provisioning/Dummy excluded scope: Add a staged entry and check its ALLOCATED_ATTR is not set INFO  lib389:ticket47828_test.py:45 ####### INFO  lib389:ticket47828_test.py:46 ###############################################
Passed tickets/ticket47828_test.py::test_ticket47828_run_15 0.06
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47828_test.py:42 ############################################### INFO  lib389:ticket47828_test.py:43 ####### INFO  lib389:ticket47828_test.py:44 ####### Provisioning/Dummy excluded scope: Add a staged entry and check its ALLOCATED_ATTR is unchanged (!= magic) INFO  lib389:ticket47828_test.py:45 ####### INFO  lib389:ticket47828_test.py:46 ###############################################
Passed tickets/ticket47828_test.py::test_ticket47828_run_16 0.01
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47828_test.py:42 ############################################### INFO  lib389:ticket47828_test.py:43 ####### INFO  lib389:ticket47828_test.py:44 ####### Provisioning/Dummy excluded scope: Add an dummy entry and check its ALLOCATED_ATTR not is set INFO  lib389:ticket47828_test.py:45 ####### INFO  lib389:ticket47828_test.py:46 ###############################################
Passed tickets/ticket47828_test.py::test_ticket47828_run_17 0.01
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47828_test.py:42 ############################################### INFO  lib389:ticket47828_test.py:43 ####### INFO  lib389:ticket47828_test.py:44 ####### Provisioning/Dummy excluded scope: Add an dummy entry and check its ALLOCATED_ATTR is unchanged (!= magic) INFO  lib389:ticket47828_test.py:45 ####### INFO  lib389:ticket47828_test.py:46 ###############################################
Passed tickets/ticket47828_test.py::test_ticket47828_run_18 0.01
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47828_test.py:42 ############################################### INFO  lib389:ticket47828_test.py:43 ####### INFO  lib389:ticket47828_test.py:44 ####### Exclude PROVISIONING and a wrong container INFO  lib389:ticket47828_test.py:45 ####### INFO  lib389:ticket47828_test.py:46 ###############################################
Passed tickets/ticket47828_test.py::test_ticket47828_run_19 0.02
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47828_test.py:42 ############################################### INFO  lib389:ticket47828_test.py:43 ####### INFO  lib389:ticket47828_test.py:44 ####### Provisioning+wrong container excluded scope: Add an active entry and check its ALLOCATED_ATTR is set INFO  lib389:ticket47828_test.py:45 ####### INFO  lib389:ticket47828_test.py:46 ###############################################
Passed tickets/ticket47828_test.py::test_ticket47828_run_20 0.02
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47828_test.py:42 ############################################### INFO  lib389:ticket47828_test.py:43 ####### INFO  lib389:ticket47828_test.py:44 ####### Provisioning+wrong container excluded scope: Add an active entry and check its ALLOCATED_ATTR is unchanged (!= magic) INFO  lib389:ticket47828_test.py:45 ####### INFO  lib389:ticket47828_test.py:46 ###############################################
Passed tickets/ticket47828_test.py::test_ticket47828_run_21 0.05
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47828_test.py:42 ############################################### INFO  lib389:ticket47828_test.py:43 ####### INFO  lib389:ticket47828_test.py:44 ####### Provisioning+wrong container excluded scope: Add a staged entry and check its ALLOCATED_ATTR is not set INFO  lib389:ticket47828_test.py:45 ####### INFO  lib389:ticket47828_test.py:46 ###############################################
Passed tickets/ticket47828_test.py::test_ticket47828_run_22 0.02
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47828_test.py:42 ############################################### INFO  lib389:ticket47828_test.py:43 ####### INFO  lib389:ticket47828_test.py:44 ####### Provisioning+wrong container excluded scope: Add a staged entry and check its ALLOCATED_ATTR is unchanged (!= magic) INFO  lib389:ticket47828_test.py:45 ####### INFO  lib389:ticket47828_test.py:46 ###############################################
Passed tickets/ticket47828_test.py::test_ticket47828_run_23 0.02
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47828_test.py:42 ############################################### INFO  lib389:ticket47828_test.py:43 ####### INFO  lib389:ticket47828_test.py:44 ####### Provisioning+wrong container excluded scope: Add an dummy entry and check its ALLOCATED_ATTR is set INFO  lib389:ticket47828_test.py:45 ####### INFO  lib389:ticket47828_test.py:46 ###############################################
Passed tickets/ticket47828_test.py::test_ticket47828_run_24 0.01
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47828_test.py:42 ############################################### INFO  lib389:ticket47828_test.py:43 ####### INFO  lib389:ticket47828_test.py:44 ####### Provisioning+wrong container excluded scope: Add an dummy entry and check its ALLOCATED_ATTR is unchanged (!= magic) INFO  lib389:ticket47828_test.py:45 ####### INFO  lib389:ticket47828_test.py:46 ###############################################
Passed tickets/ticket47828_test.py::test_ticket47828_run_25 0.01
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47828_test.py:42 ############################################### INFO  lib389:ticket47828_test.py:43 ####### INFO  lib389:ticket47828_test.py:44 ####### Exclude a wrong container INFO  lib389:ticket47828_test.py:45 ####### INFO  lib389:ticket47828_test.py:46 ###############################################
Passed tickets/ticket47828_test.py::test_ticket47828_run_26 0.01
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47828_test.py:42 ############################################### INFO  lib389:ticket47828_test.py:43 ####### INFO  lib389:ticket47828_test.py:44 ####### Wrong container excluded scope: Add an active entry and check its ALLOCATED_ATTR is set INFO  lib389:ticket47828_test.py:45 ####### INFO  lib389:ticket47828_test.py:46 ###############################################
Passed tickets/ticket47828_test.py::test_ticket47828_run_27 0.01
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47828_test.py:42 ############################################### INFO  lib389:ticket47828_test.py:43 ####### INFO  lib389:ticket47828_test.py:44 ####### Wrong container excluded scope: Add an active entry and check its ALLOCATED_ATTR is unchanged (!= magic) INFO  lib389:ticket47828_test.py:45 ####### INFO  lib389:ticket47828_test.py:46 ###############################################
Passed tickets/ticket47828_test.py::test_ticket47828_run_28 0.02
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47828_test.py:42 ############################################### INFO  lib389:ticket47828_test.py:43 ####### INFO  lib389:ticket47828_test.py:44 ####### Wrong container excluded scope: Add a staged entry and check its ALLOCATED_ATTR is not set INFO  lib389:ticket47828_test.py:45 ####### INFO  lib389:ticket47828_test.py:46 ###############################################
Passed tickets/ticket47828_test.py::test_ticket47828_run_29 0.01
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47828_test.py:42 ############################################### INFO  lib389:ticket47828_test.py:43 ####### INFO  lib389:ticket47828_test.py:44 ####### Wrong container excluded scope: Add a staged entry and check its ALLOCATED_ATTR is unchanged (!= magic) INFO  lib389:ticket47828_test.py:45 ####### INFO  lib389:ticket47828_test.py:46 ###############################################
Passed tickets/ticket47828_test.py::test_ticket47828_run_30 0.02
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47828_test.py:42 ############################################### INFO  lib389:ticket47828_test.py:43 ####### INFO  lib389:ticket47828_test.py:44 ####### Wrong container excluded scope: Add an dummy entry and check its ALLOCATED_ATTR is set INFO  lib389:ticket47828_test.py:45 ####### INFO  lib389:ticket47828_test.py:46 ###############################################
Passed tickets/ticket47828_test.py::test_ticket47828_run_31 0.01
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47828_test.py:42 ############################################### INFO  lib389:ticket47828_test.py:43 ####### INFO  lib389:ticket47828_test.py:44 ####### Wrong container excluded scope: Add an dummy entry and check its ALLOCATED_ATTR is unchanged (!= magic) INFO  lib389:ticket47828_test.py:45 ####### INFO  lib389:ticket47828_test.py:46 ###############################################
Passed tickets/ticket47829_test.py::test_ticket47829_init 4.54
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed tickets/ticket47829_test.py::test_ticket47829_mod_active_user_1 2.06
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47829_test.py:62 ############################################### INFO  lib389:ticket47829_test.py:63 ####### INFO  lib389:ticket47829_test.py:64 ####### MOD: add an active user to an active group INFO  lib389:ticket47829_test.py:65 ####### INFO  lib389:ticket47829_test.py:66 ############################################### INFO  lib389:ticket47829_test.py:172 add entry cn=active guy,cn=accounts,cn=in,dc=example,dc=com INFO  lib389:ticket47829_test.py:173 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com INFO  lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' INFO  lib389:ticket47829_test.py:116 !!!!!!! cn=active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=active guy,cn=accounts,cn=in,dc=example,dc=com' INFO  lib389:ticket47829_test.py:172 delete entry cn=active guy,cn=accounts,cn=in,dc=example,dc=com INFO  lib389:ticket47829_test.py:173 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com
Passed tickets/ticket47829_test.py::test_ticket47829_mod_active_user_2 2.27
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47829_test.py:62 ############################################### INFO  lib389:ticket47829_test.py:63 ####### INFO  lib389:ticket47829_test.py:64 ####### MOD: add an Active user to a Stage group INFO  lib389:ticket47829_test.py:65 ####### INFO  lib389:ticket47829_test.py:66 ############################################### INFO  lib389:ticket47829_test.py:172 add entry cn=active guy,cn=accounts,cn=in,dc=example,dc=com INFO  lib389:ticket47829_test.py:173 to group cn=stage group,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com INFO  lib389:ticket47829_test.py:116 !!!!!!! cn=stage group,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com: member ->b'cn=active guy,cn=accounts,cn=in,dc=example,dc=com' INFO  lib389:ticket47829_test.py:172 delete entry cn=active guy,cn=accounts,cn=in,dc=example,dc=com INFO  lib389:ticket47829_test.py:173 to group cn=stage group,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com
Passed tickets/ticket47829_test.py::test_ticket47829_mod_active_user_3 2.16
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47829_test.py:62 ############################################### INFO  lib389:ticket47829_test.py:63 ####### INFO  lib389:ticket47829_test.py:64 ####### MOD: add an Active user to a out of scope group INFO  lib389:ticket47829_test.py:65 ####### INFO  lib389:ticket47829_test.py:66 ############################################### INFO  lib389:ticket47829_test.py:172 add entry cn=active guy,cn=accounts,cn=in,dc=example,dc=com INFO  lib389:ticket47829_test.py:173 to group cn=out group,cn=out,dc=example,dc=com INFO  lib389:ticket47829_test.py:116 !!!!!!! cn=out group,cn=out,dc=example,dc=com: member ->b'cn=active guy,cn=accounts,cn=in,dc=example,dc=com' INFO  lib389:ticket47829_test.py:172 delete entry cn=active guy,cn=accounts,cn=in,dc=example,dc=com INFO  lib389:ticket47829_test.py:173 to group cn=out group,cn=out,dc=example,dc=com
Passed tickets/ticket47829_test.py::test_ticket47829_mod_stage_user_1 2.27
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47829_test.py:62 ############################################### INFO  lib389:ticket47829_test.py:63 ####### INFO  lib389:ticket47829_test.py:64 ####### MOD: add an Stage user to a Active group INFO  lib389:ticket47829_test.py:65 ####### INFO  lib389:ticket47829_test.py:66 ############################################### INFO  lib389:ticket47829_test.py:172 add entry cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com INFO  lib389:ticket47829_test.py:173 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com INFO  lib389:ticket47829_test.py:116 !!!!!!! cn=active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com' INFO  lib389:ticket47829_test.py:172 delete entry cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com INFO  lib389:ticket47829_test.py:173 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com
Passed tickets/ticket47829_test.py::test_ticket47829_mod_stage_user_2 2.01
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47829_test.py:62 ############################################### INFO  lib389:ticket47829_test.py:63 ####### INFO  lib389:ticket47829_test.py:64 ####### MOD: add an Stage user to a Stage group INFO  lib389:ticket47829_test.py:65 ####### INFO  lib389:ticket47829_test.py:66 ############################################### INFO  lib389:ticket47829_test.py:172 add entry cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com INFO  lib389:ticket47829_test.py:173 to group cn=stage group,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com INFO  lib389:ticket47829_test.py:116 !!!!!!! cn=stage group,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com: member ->b'cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com' INFO  lib389:ticket47829_test.py:172 delete entry cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com INFO  lib389:ticket47829_test.py:173 to group cn=stage group,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com
Passed tickets/ticket47829_test.py::test_ticket47829_mod_stage_user_3 2.01
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47829_test.py:62 ############################################### INFO  lib389:ticket47829_test.py:63 ####### INFO  lib389:ticket47829_test.py:64 ####### MOD: add an Stage user to a out of scope group INFO  lib389:ticket47829_test.py:65 ####### INFO  lib389:ticket47829_test.py:66 ############################################### INFO  lib389:ticket47829_test.py:172 add entry cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com INFO  lib389:ticket47829_test.py:173 to group cn=out group,cn=out,dc=example,dc=com INFO  lib389:ticket47829_test.py:116 !!!!!!! cn=out group,cn=out,dc=example,dc=com: member ->b'cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com' INFO  lib389:ticket47829_test.py:172 delete entry cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com INFO  lib389:ticket47829_test.py:173 to group cn=out group,cn=out,dc=example,dc=com
Passed tickets/ticket47829_test.py::test_ticket47829_mod_out_user_1 2.04
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47829_test.py:62 ############################################### INFO  lib389:ticket47829_test.py:63 ####### INFO  lib389:ticket47829_test.py:64 ####### MOD: add an out of scope user to an active group INFO  lib389:ticket47829_test.py:65 ####### INFO  lib389:ticket47829_test.py:66 ############################################### INFO  lib389:ticket47829_test.py:172 add entry cn=out guy,cn=out,dc=example,dc=com INFO  lib389:ticket47829_test.py:173 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com INFO  lib389:ticket47829_test.py:116 !!!!!!! cn=active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=out guy,cn=out,dc=example,dc=com' INFO  lib389:ticket47829_test.py:172 delete entry cn=out guy,cn=out,dc=example,dc=com INFO  lib389:ticket47829_test.py:173 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com
Passed tickets/ticket47829_test.py::test_ticket47829_mod_out_user_2 2.01
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47829_test.py:62 ############################################### INFO  lib389:ticket47829_test.py:63 ####### INFO  lib389:ticket47829_test.py:64 ####### MOD: add an out of scope user to a Stage group INFO  lib389:ticket47829_test.py:65 ####### INFO  lib389:ticket47829_test.py:66 ############################################### INFO  lib389:ticket47829_test.py:172 add entry cn=out guy,cn=out,dc=example,dc=com INFO  lib389:ticket47829_test.py:173 to group cn=stage group,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com INFO  lib389:ticket47829_test.py:116 !!!!!!! cn=stage group,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com: member ->b'cn=out guy,cn=out,dc=example,dc=com' INFO  lib389:ticket47829_test.py:172 delete entry cn=out guy,cn=out,dc=example,dc=com INFO  lib389:ticket47829_test.py:173 to group cn=stage group,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com
Passed tickets/ticket47829_test.py::test_ticket47829_mod_out_user_3 2.01
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47829_test.py:62 ############################################### INFO  lib389:ticket47829_test.py:63 ####### INFO  lib389:ticket47829_test.py:64 ####### MOD: add an out of scope user to an out of scope group INFO  lib389:ticket47829_test.py:65 ####### INFO  lib389:ticket47829_test.py:66 ############################################### INFO  lib389:ticket47829_test.py:172 add entry cn=out guy,cn=out,dc=example,dc=com INFO  lib389:ticket47829_test.py:173 to group cn=out group,cn=out,dc=example,dc=com INFO  lib389:ticket47829_test.py:116 !!!!!!! cn=out group,cn=out,dc=example,dc=com: member ->b'cn=out guy,cn=out,dc=example,dc=com' INFO  lib389:ticket47829_test.py:172 delete entry cn=out guy,cn=out,dc=example,dc=com INFO  lib389:ticket47829_test.py:173 to group cn=out group,cn=out,dc=example,dc=com
Passed tickets/ticket47829_test.py::test_ticket47829_mod_active_user_modrdn_active_user_1 2.04
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47829_test.py:62 ############################################### INFO  lib389:ticket47829_test.py:63 ####### INFO  lib389:ticket47829_test.py:64 ####### add an Active user to a Active group. Then move Active user to Active INFO  lib389:ticket47829_test.py:65 ####### INFO  lib389:ticket47829_test.py:66 ############################################### INFO  lib389:ticket47829_test.py:172 add entry cn=active guy,cn=accounts,cn=in,dc=example,dc=com INFO  lib389:ticket47829_test.py:173 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com INFO  lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' INFO  lib389:ticket47829_test.py:116 !!!!!!! cn=active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=active guy,cn=accounts,cn=in,dc=example,dc=com' INFO  lib389:ticket47829_test.py:132 ######################### MODRDN cn=xactive guy ###################### INFO  lib389:ticket47829_test.py:96 !!!!!!! cn=xactive guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' INFO  lib389:ticket47829_test.py:116 !!!!!!! cn=active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=xactive guy,cn=accounts,cn=in,dc=example,dc=com' INFO  lib389:ticket47829_test.py:132 ######################### MODRDN cn=active guy ###################### INFO  lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' INFO  lib389:ticket47829_test.py:116 !!!!!!! cn=active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=active guy,cn=accounts,cn=in,dc=example,dc=com' INFO  lib389:ticket47829_test.py:172 delete entry cn=active guy,cn=accounts,cn=in,dc=example,dc=com INFO  lib389:ticket47829_test.py:173 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com
Passed tickets/ticket47829_test.py::test_ticket47829_mod_active_user_modrdn_stage_user_1 1.06
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47829_test.py:62 ############################################### INFO  lib389:ticket47829_test.py:63 ####### INFO  lib389:ticket47829_test.py:64 ####### add an Active user to a Active group. Then move Active user to Stage INFO  lib389:ticket47829_test.py:65 ####### INFO  lib389:ticket47829_test.py:66 ############################################### INFO  lib389:ticket47829_test.py:172 add entry cn=active guy,cn=accounts,cn=in,dc=example,dc=com INFO  lib389:ticket47829_test.py:173 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com INFO  lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' INFO  lib389:ticket47829_test.py:116 !!!!!!! cn=active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=active guy,cn=accounts,cn=in,dc=example,dc=com' INFO  lib389:ticket47829_test.py:132 ######################### MODRDN cn=active guy ###################### INFO  lib389:ticket47829_test.py:132 ######################### MODRDN cn=active guy ######################
Passed tickets/ticket47829_test.py::test_ticket47829_mod_active_user_modrdn_out_user_1 1.03
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47829_test.py:62 ############################################### INFO  lib389:ticket47829_test.py:63 ####### INFO  lib389:ticket47829_test.py:64 ####### add an Active user to a Active group. Then move Active user to out of scope INFO  lib389:ticket47829_test.py:65 ####### INFO  lib389:ticket47829_test.py:66 ############################################### INFO  lib389:ticket47829_test.py:172 add entry cn=active guy,cn=accounts,cn=in,dc=example,dc=com INFO  lib389:ticket47829_test.py:173 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com INFO  lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' INFO  lib389:ticket47829_test.py:116 !!!!!!! cn=active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=active guy,cn=accounts,cn=in,dc=example,dc=com' INFO  lib389:ticket47829_test.py:132 ######################### MODRDN cn=active guy ###################### INFO  lib389:ticket47829_test.py:132 ######################### MODRDN cn=active guy ######################
Passed tickets/ticket47829_test.py::test_ticket47829_mod_modrdn_1 1.03
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47829_test.py:62 ############################################### INFO  lib389:ticket47829_test.py:63 ####### INFO  lib389:ticket47829_test.py:64 ####### add an Stage user to a Active group. Then move Stage user to Active INFO  lib389:ticket47829_test.py:65 ####### INFO  lib389:ticket47829_test.py:66 ############################################### INFO  lib389:ticket47829_test.py:172 add entry cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com INFO  lib389:ticket47829_test.py:173 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com INFO  lib389:ticket47829_test.py:116 !!!!!!! cn=active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com' INFO  lib389:ticket47829_test.py:132 ######################### MODRDN cn=stage guy ###################### INFO  lib389:ticket47829_test.py:96 !!!!!!! cn=stage guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' INFO  lib389:ticket47829_test.py:116 !!!!!!! cn=active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=stage guy,cn=accounts,cn=in,dc=example,dc=com' INFO  lib389:ticket47829_test.py:132 ######################### MODRDN cn=stage guy ######################
Passed tickets/ticket47829_test.py::test_ticket47829_mod_stage_user_modrdn_active_user_1 1.06
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47829_test.py:62 ############################################### INFO  lib389:ticket47829_test.py:63 ####### INFO  lib389:ticket47829_test.py:64 ####### add an Stage user to a Active group. Then move Stage user to Active INFO  lib389:ticket47829_test.py:65 ####### INFO  lib389:ticket47829_test.py:66 ############################################### INFO  lib389:ticket47829_test.py:172 add entry cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com INFO  lib389:ticket47829_test.py:173 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com INFO  lib389:ticket47829_test.py:116 !!!!!!! cn=active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com' INFO  lib389:ticket47829_test.py:132 ######################### MODRDN cn=stage guy ###################### INFO  lib389:ticket47829_test.py:96 !!!!!!! cn=stage guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' INFO  lib389:ticket47829_test.py:116 !!!!!!! cn=active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=stage guy,cn=accounts,cn=in,dc=example,dc=com' INFO  lib389:ticket47829_test.py:132 ######################### MODRDN cn=stage guy ######################
Passed tickets/ticket47829_test.py::test_ticket47829_mod_stage_user_modrdn_stage_user_1 0.00
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47829_test.py:62 ############################################### INFO  lib389:ticket47829_test.py:63 ####### INFO  lib389:ticket47829_test.py:64 ####### add an Stage user to a Active group. Then move Stage user to Stage INFO  lib389:ticket47829_test.py:65 ####### INFO  lib389:ticket47829_test.py:66 ############################################### INFO  lib389:ticket47829_test.py:62 ############################################### INFO  lib389:ticket47829_test.py:63 ####### INFO  lib389:ticket47829_test.py:64 ####### Return because it requires a fix for 47833 INFO  lib389:ticket47829_test.py:65 ####### INFO  lib389:ticket47829_test.py:66 ###############################################
Passed tickets/ticket47829_test.py::test_ticket47829_indirect_active_group_1 2.04
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47829_test.py:62 ############################################### INFO  lib389:ticket47829_test.py:63 ####### INFO  lib389:ticket47829_test.py:64 ####### add an Active group (G1) to an active group (G0). Then add active user to G1 INFO  lib389:ticket47829_test.py:65 ####### INFO  lib389:ticket47829_test.py:66 ############################################### INFO  lib389:ticket47829_test.py:172 add entry cn=active guy,cn=accounts,cn=in,dc=example,dc=com INFO  lib389:ticket47829_test.py:173 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com INFO  lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' INFO  lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' INFO  lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=indirect active group,cn=accounts,cn=in,dc=example,dc=com' INFO  lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' INFO  lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' INFO  lib389:ticket47829_test.py:172 delete entry cn=active guy,cn=accounts,cn=in,dc=example,dc=com INFO  lib389:ticket47829_test.py:173 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com
Passed tickets/ticket47829_test.py::test_ticket47829_indirect_active_group_2 1.04
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47829_test.py:62 ############################################### INFO  lib389:ticket47829_test.py:63 ####### INFO  lib389:ticket47829_test.py:64 ####### add an Active group (G1) to an active group (G0). Then add active user to G1. Then move active user to stage INFO  lib389:ticket47829_test.py:65 ####### INFO  lib389:ticket47829_test.py:66 ############################################### INFO  lib389:ticket47829_test.py:172 add entry cn=active guy,cn=accounts,cn=in,dc=example,dc=com INFO  lib389:ticket47829_test.py:173 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com INFO  lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' INFO  lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' INFO  lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=indirect active group,cn=accounts,cn=in,dc=example,dc=com' INFO  lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' INFO  lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' INFO  lib389:ticket47829_test.py:132 ######################### MODRDN cn=active guy ###################### INFO  lib389:ticket47829_test.py:132 ######################### MODRDN cn=active guy ######################
Passed tickets/ticket47829_test.py::test_ticket47829_indirect_active_group_3 1.05
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47829_test.py:62 ############################################### INFO  lib389:ticket47829_test.py:63 ####### INFO  lib389:ticket47829_test.py:64 ####### add an Active group (G1) to an active group (G0). Then add active user to G1. Then move active user to out of the scope INFO  lib389:ticket47829_test.py:65 ####### INFO  lib389:ticket47829_test.py:66 ############################################### INFO  lib389:ticket47829_test.py:172 add entry cn=active guy,cn=accounts,cn=in,dc=example,dc=com INFO  lib389:ticket47829_test.py:173 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com INFO  lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' INFO  lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' INFO  lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=indirect active group,cn=accounts,cn=in,dc=example,dc=com' INFO  lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' INFO  lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' INFO  lib389:ticket47829_test.py:132 ######################### MODRDN cn=active guy ###################### INFO  lib389:ticket47829_test.py:132 ######################### MODRDN cn=active guy ######################
Passed tickets/ticket47829_test.py::test_ticket47829_indirect_active_group_4 1.04
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47829_test.py:62 ############################################### INFO  lib389:ticket47829_test.py:63 ####### INFO  lib389:ticket47829_test.py:64 ####### add an Active group (G1) to an active group (G0). Then add stage user to G1. Then move user to active. Then move it back INFO  lib389:ticket47829_test.py:65 ####### INFO  lib389:ticket47829_test.py:66 ############################################### INFO  lib389:ticket47829_test.py:172 add entry cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com INFO  lib389:ticket47829_test.py:173 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com INFO  lib389:ticket47829_test.py:116 !!!!!!! cn=active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com' INFO  lib389:ticket47829_test.py:116 !!!!!!! cn=indirect active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' INFO  lib389:ticket47829_test.py:132 ######################### MODRDN cn=stage guy ###################### INFO  lib389:ticket47829_test.py:116 !!!!!!! cn=active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=stage guy,cn=accounts,cn=in,dc=example,dc=com' INFO  lib389:ticket47829_test.py:116 !!!!!!! cn=indirect active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' INFO  lib389:ticket47829_test.py:96 !!!!!!! cn=stage guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' INFO  lib389:ticket47829_test.py:96 !!!!!!! cn=stage guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=indirect active group,cn=accounts,cn=in,dc=example,dc=com' INFO  lib389:ticket47829_test.py:96 !!!!!!! 
cn=stage guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' INFO  lib389:ticket47829_test.py:132 ######################### MODRDN cn=stage guy ###################### INFO  lib389:ticket47829_test.py:116 !!!!!!! cn=indirect active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com'
Passed tickets/ticket47833_test.py::test_ticket47829_init 4.50
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed tickets/ticket47833_test.py::test_ticket47829_mod_stage_user_modrdn_stage_user_1 1.28
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47833_test.py:58 ############################################### INFO  lib389:ticket47833_test.py:59 ####### INFO  lib389:ticket47833_test.py:60 ####### add an Stage user to a Active group. Then move Stage user to Stage INFO  lib389:ticket47833_test.py:61 ####### INFO  lib389:ticket47833_test.py:62 ############################################### INFO  lib389:ticket47833_test.py:145 add entry cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com INFO  lib389:ticket47833_test.py:146 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com INFO  lib389:ticket47833_test.py:112 !!!!!!! cn=active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com' INFO  lib389:ticket47833_test.py:128 ######################### MODRDN cn=xstage guy ###################### INFO  lib389:ticket47833_test.py:112 !!!!!!! cn=active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com'
Passed tickets/ticket47869MMR_test.py::test_ticket47869_init 12.49
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 94bbb651-1ccc-4f72-8c55-57df949ee685 / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working 
(expect 2907d082-d575-4484-9fd9-dc036c9669ba / got description=94bbb651-1ccc-4f72-8c55-57df949ee685) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47869MMR_test.py:51 Add cn=bind_entry, dc=example,dc=com INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect e9c1298b-e532-4e82-996c-aa9aa4a5c1dd / got description=2907d082-d575-4484-9fd9-dc036c9669ba) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 3c12eedc-4baf-4da2-972c-d55e02e23c5f / got description=e9c1298b-e532-4e82-996c-aa9aa4a5c1dd) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 2962d661-92a6-48fc-9249-55c014de7df4 / got description=3c12eedc-4baf-4da2-972c-d55e02e23c5f) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c37c5497-1971-4802-94c0-81ad3ca74fef / got description=2962d661-92a6-48fc-9249-55c014de7df4) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from 
ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect ca8cc245-0e63-49de-825d-f870bba801b7 / got description=c37c5497-1971-4802-94c0-81ad3ca74fef) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect da712cf3-90d6-442c-801a-36d450ac9edc / got description=ca8cc245-0e63-49de-825d-f870bba801b7) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect d0b994fb-3766-4902-877a-6088f17aea55 / got description=da712cf3-90d6-442c-801a-36d450ac9edc) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 8627a43e-70ab-41dc-b234-966edecfa46a / got description=d0b994fb-3766-4902-877a-6088f17aea55) INFO  
lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0b34d2b4-0745-4843-bac2-3f26ec12c212 / got description=8627a43e-70ab-41dc-b234-966edecfa46a) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect b01d3b42-dbb4-4079-b7ae-c7889c8ec0fa / got description=0b34d2b4-0745-4843-bac2-3f26ec12c212) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect b774fefb-c272-4d82-80f3-852c3474fc00 / got description=b01d3b42-dbb4-4079-b7ae-c7889c8ec0fa) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working
Passed tickets/ticket47869MMR_test.py::test_ticket47869_check 0.13
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47869MMR_test.py:93 ######################### CHECK nscpentrywsi ###################### INFO  lib389:ticket47869MMR_test.py:95 ##### Master1: Bind as cn=Directory Manager ##### INFO  lib389:ticket47869MMR_test.py:98 Master1: Calling search_ext... INFO  lib389:ticket47869MMR_test.py:102 27 results INFO  lib389:ticket47869MMR_test.py:104 Results: INFO  lib389:ticket47869MMR_test.py:106 dn: dc=example,dc=com INFO  lib389:ticket47869MMR_test.py:106 dn: ou=groups,dc=example,dc=com INFO  lib389:ticket47869MMR_test.py:106 dn: ou=people,dc=example,dc=com INFO  lib389:ticket47869MMR_test.py:106 dn: ou=permissions,dc=example,dc=com INFO  lib389:ticket47869MMR_test.py:106 dn: ou=services,dc=example,dc=com INFO  lib389:ticket47869MMR_test.py:106 dn: uid=demo_user,ou=people,dc=example,dc=com INFO  lib389:ticket47869MMR_test.py:106 dn: cn=demo_group,ou=groups,dc=example,dc=com INFO  lib389:ticket47869MMR_test.py:106 dn: cn=group_admin,ou=permissions,dc=example,dc=com INFO  lib389:ticket47869MMR_test.py:106 dn: cn=group_modify,ou=permissions,dc=example,dc=com INFO  lib389:ticket47869MMR_test.py:106 dn: cn=user_admin,ou=permissions,dc=example,dc=com INFO  lib389:ticket47869MMR_test.py:106 dn: cn=user_modify,ou=permissions,dc=example,dc=com INFO  lib389:ticket47869MMR_test.py:106 dn: cn=user_passwd_reset,ou=permissions,dc=example,dc=com INFO  lib389:ticket47869MMR_test.py:106 dn: cn=user_private_read,ou=permissions,dc=example,dc=com INFO  lib389:ticket47869MMR_test.py:106 dn: cn=replication_managers,dc=example,dc=com INFO  lib389:ticket47869MMR_test.py:106 dn: cn=ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701,ou=services,dc=example,dc=com INFO  lib389:ticket47869MMR_test.py:106 dn: cn=ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63702,ou=services,dc=example,dc=com INFO  lib389:ticket47869MMR_test.py:106 dn: cn=bind_entry,dc=example,dc=com INFO  lib389:ticket47869MMR_test.py:106 dn: cn=test_entry0,dc=example,dc=com INFO  
lib389:ticket47869MMR_test.py:106 dn: cn=test_entry1,dc=example,dc=com INFO  lib389:ticket47869MMR_test.py:106 dn: cn=test_entry2,dc=example,dc=com INFO  lib389:ticket47869MMR_test.py:106 dn: cn=test_entry3,dc=example,dc=com INFO  lib389:ticket47869MMR_test.py:106 dn: cn=test_entry4,dc=example,dc=com INFO  lib389:ticket47869MMR_test.py:106 dn: cn=test_entry5,dc=example,dc=com INFO  lib389:ticket47869MMR_test.py:106 dn: cn=test_entry6,dc=example,dc=com INFO  lib389:ticket47869MMR_test.py:106 dn: cn=test_entry7,dc=example,dc=com INFO  lib389:ticket47869MMR_test.py:106 dn: cn=test_entry8,dc=example,dc=com INFO  lib389:ticket47869MMR_test.py:106 dn: cn=test_entry9,dc=example,dc=com INFO  lib389:ticket47869MMR_test.py:110 Master1: count of nscpentrywsi: 27 INFO  lib389:ticket47869MMR_test.py:112 ##### Master2: Bind as cn=Directory Manager ##### INFO  lib389:ticket47869MMR_test.py:115 Master2: Calling search_ext... INFO  lib389:ticket47869MMR_test.py:119 27 results INFO  lib389:ticket47869MMR_test.py:121 Results: INFO  lib389:ticket47869MMR_test.py:123 dn: dc=example,dc=com INFO  lib389:ticket47869MMR_test.py:123 dn: ou=groups,dc=example,dc=com INFO  lib389:ticket47869MMR_test.py:123 dn: ou=people,dc=example,dc=com INFO  lib389:ticket47869MMR_test.py:123 dn: ou=permissions,dc=example,dc=com INFO  lib389:ticket47869MMR_test.py:123 dn: ou=services,dc=example,dc=com INFO  lib389:ticket47869MMR_test.py:123 dn: cn=replication_managers,dc=example,dc=com INFO  lib389:ticket47869MMR_test.py:123 dn: cn=demo_group,ou=groups,dc=example,dc=com INFO  lib389:ticket47869MMR_test.py:123 dn: uid=demo_user,ou=people,dc=example,dc=com INFO  lib389:ticket47869MMR_test.py:123 dn: cn=group_admin,ou=permissions,dc=example,dc=com INFO  lib389:ticket47869MMR_test.py:123 dn: cn=group_modify,ou=permissions,dc=example,dc=com INFO  lib389:ticket47869MMR_test.py:123 dn: cn=user_admin,ou=permissions,dc=example,dc=com INFO  lib389:ticket47869MMR_test.py:123 dn: 
cn=user_modify,ou=permissions,dc=example,dc=com INFO  lib389:ticket47869MMR_test.py:123 dn: cn=user_passwd_reset,ou=permissions,dc=example,dc=com INFO  lib389:ticket47869MMR_test.py:123 dn: cn=user_private_read,ou=permissions,dc=example,dc=com INFO  lib389:ticket47869MMR_test.py:123 dn: cn=ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63701,ou=services,dc=example,dc=com INFO  lib389:ticket47869MMR_test.py:123 dn: cn=ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:63702,ou=services,dc=example,dc=com INFO  lib389:ticket47869MMR_test.py:123 dn: cn=bind_entry,dc=example,dc=com INFO  lib389:ticket47869MMR_test.py:123 dn: cn=test_entry0,dc=example,dc=com INFO  lib389:ticket47869MMR_test.py:123 dn: cn=test_entry1,dc=example,dc=com INFO  lib389:ticket47869MMR_test.py:123 dn: cn=test_entry2,dc=example,dc=com INFO  lib389:ticket47869MMR_test.py:123 dn: cn=test_entry3,dc=example,dc=com INFO  lib389:ticket47869MMR_test.py:123 dn: cn=test_entry4,dc=example,dc=com INFO  lib389:ticket47869MMR_test.py:123 dn: cn=test_entry5,dc=example,dc=com INFO  lib389:ticket47869MMR_test.py:123 dn: cn=test_entry6,dc=example,dc=com INFO  lib389:ticket47869MMR_test.py:123 dn: cn=test_entry7,dc=example,dc=com INFO  lib389:ticket47869MMR_test.py:123 dn: cn=test_entry8,dc=example,dc=com INFO  lib389:ticket47869MMR_test.py:123 dn: cn=test_entry9,dc=example,dc=com INFO  lib389:ticket47869MMR_test.py:127 Master2: count of nscpentrywsi: 27 INFO  lib389:ticket47869MMR_test.py:130 ##### Master1: Bind as cn=bind_entry, dc=example,dc=com ##### INFO  lib389:ticket47869MMR_test.py:133 Master1: Calling search_ext... INFO  lib389:ticket47869MMR_test.py:137 27 results INFO  lib389:ticket47869MMR_test.py:143 Master1: count of nscpentrywsi: 0 INFO  lib389:ticket47869MMR_test.py:146 ##### Master2: Bind as cn=bind_entry, dc=example,dc=com ##### INFO  lib389:ticket47869MMR_test.py:149 Master2: Calling search_ext... 
INFO  lib389:ticket47869MMR_test.py:153 27 results INFO  lib389:ticket47869MMR_test.py:159 Master2: count of nscpentrywsi: 0 INFO  lib389:ticket47869MMR_test.py:162 ##### Master1: Bind as anonymous ##### INFO  lib389:ticket47869MMR_test.py:165 Master1: Calling search_ext... INFO  lib389:ticket47869MMR_test.py:169 27 results INFO  lib389:ticket47869MMR_test.py:175 Master1: count of nscpentrywsi: 0 INFO  lib389:ticket47869MMR_test.py:178 ##### Master2: Bind as anonymous ##### INFO  lib389:ticket47869MMR_test.py:181 Master2: Calling search_ext... INFO  lib389:ticket47869MMR_test.py:185 27 results INFO  lib389:ticket47869MMR_test.py:191 Master2: count of nscpentrywsi: 0 INFO  lib389:ticket47869MMR_test.py:193 ##### ticket47869 was successfully verified. #####
Passed tickets/ticket47871_test.py::test_ticket47871_init 3.94
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for consumer1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39201, 'ldap-secureport': 63901, 'server-id': 'consumer1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:169 Joining consumer consumer1 from master1 ... INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect bbd1dc6c-9430-42d2-b80a-28b4e36bd419 / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is working INFO  lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 INFO  lib389.topologies:topologies.py:174 Ensuring consumer consumer1 from master1 ... 
INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 already exists
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47871_test.py:53 test_ticket47871_init topology_m1c1 <lib389.topologies.TopologyMain object at 0x7fd1672e84c0>
Passed tickets/ticket47871_test.py::test_ticket47871_1 1.41
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47871_test.py:71 test_ticket47871_init: 10 entries ADDed other_entry[0..9] INFO  lib389:ticket47871_test.py:78 Added entries are INFO  lib389:ticket47871_test.py:80 changenumber=1,cn=changelog INFO  lib389:ticket47871_test.py:80 changenumber=2,cn=changelog INFO  lib389:ticket47871_test.py:80 changenumber=3,cn=changelog INFO  lib389:ticket47871_test.py:80 changenumber=4,cn=changelog INFO  lib389:ticket47871_test.py:80 changenumber=5,cn=changelog INFO  lib389:ticket47871_test.py:80 changenumber=6,cn=changelog INFO  lib389:ticket47871_test.py:80 changenumber=7,cn=changelog INFO  lib389:ticket47871_test.py:80 changenumber=8,cn=changelog INFO  lib389:ticket47871_test.py:80 changenumber=9,cn=changelog INFO  lib389:ticket47871_test.py:80 changenumber=10,cn=changelog
Passed tickets/ticket47871_test.py::test_ticket47871_2 18.03
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47871_test.py:93 Try no 1 it remains 10 entries INFO  lib389:ticket47871_test.py:95 changenumber=1,cn=changelog INFO  lib389:ticket47871_test.py:95 changenumber=2,cn=changelog INFO  lib389:ticket47871_test.py:95 changenumber=3,cn=changelog INFO  lib389:ticket47871_test.py:95 changenumber=4,cn=changelog INFO  lib389:ticket47871_test.py:95 changenumber=5,cn=changelog INFO  lib389:ticket47871_test.py:95 changenumber=6,cn=changelog INFO  lib389:ticket47871_test.py:95 changenumber=7,cn=changelog INFO  lib389:ticket47871_test.py:95 changenumber=8,cn=changelog INFO  lib389:ticket47871_test.py:95 changenumber=9,cn=changelog INFO  lib389:ticket47871_test.py:95 changenumber=10,cn=changelog INFO  lib389:ticket47871_test.py:93 Try no 2 it remains 10 entries INFO  lib389:ticket47871_test.py:95 changenumber=1,cn=changelog INFO  lib389:ticket47871_test.py:95 changenumber=2,cn=changelog INFO  lib389:ticket47871_test.py:95 changenumber=3,cn=changelog INFO  lib389:ticket47871_test.py:95 changenumber=4,cn=changelog INFO  lib389:ticket47871_test.py:95 changenumber=5,cn=changelog INFO  lib389:ticket47871_test.py:95 changenumber=6,cn=changelog INFO  lib389:ticket47871_test.py:95 changenumber=7,cn=changelog INFO  lib389:ticket47871_test.py:95 changenumber=8,cn=changelog INFO  lib389:ticket47871_test.py:95 changenumber=9,cn=changelog INFO  lib389:ticket47871_test.py:95 changenumber=10,cn=changelog INFO  lib389:ticket47871_test.py:93 Try no 3 it remains 1 entries INFO  lib389:ticket47871_test.py:95 changenumber=10,cn=changelog
Passed tickets/ticket47900_test.py::test_ticket47900 0.76
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47900_test.py:52 Creating Password Administator entry cn=passwd_admin,dc=example,dc=com... INFO  lib389:ticket47900_test.py:62 Configuring password policy... INFO  lib389:ticket47900_test.py:74 Add aci to allow password admin to add/update entries... INFO  lib389:ticket47900_test.py:87 Bind as the Password Administator (before activating)... INFO  lib389:ticket47900_test.py:101 Attempt to add entries with invalid passwords, these adds should fail... INFO  lib389:ticket47900_test.py:105 Create a regular user entry cn=Joe Schmo,dc=example,dc=com with password (2_Short)... INFO  lib389:ticket47900_test.py:111 Add failed as expected: password (2_Short) result (Constraint violation) INFO  lib389:ticket47900_test.py:105 Create a regular user entry cn=Joe Schmo,dc=example,dc=com with password (No_Number)... INFO  lib389:ticket47900_test.py:111 Add failed as expected: password (No_Number) result (Constraint violation) INFO  lib389:ticket47900_test.py:105 Create a regular user entry cn=Joe Schmo,dc=example,dc=com with password (N0Special)... INFO  lib389:ticket47900_test.py:111 Add failed as expected: password (N0Special) result (Constraint violation) INFO  lib389:ticket47900_test.py:105 Create a regular user entry cn=Joe Schmo,dc=example,dc=com with password ({SSHA}bBy8UdtPZwu8uZna9QOYG3Pr41RpIRVDl8wddw==)... INFO  lib389:ticket47900_test.py:111 Add failed as expected: password ({SSHA}bBy8UdtPZwu8uZna9QOYG3Pr41RpIRVDl8wddw==) result (Constraint violation) INFO  lib389:ticket47900_test.py:123 Activate the Password Administator... INFO  lib389:ticket47900_test.py:139 Create a regular user entry cn=Joe Schmo,dc=example,dc=com with password (2_Short)... INFO  lib389:ticket47900_test.py:142 Succesfully added entry (cn=Joe Schmo,dc=example,dc=com) INFO  lib389:ticket47900_test.py:139 Create a regular user entry cn=Joe Schmo,dc=example,dc=com with password (No_Number)... 
INFO  lib389:ticket47900_test.py:142 Succesfully added entry (cn=Joe Schmo,dc=example,dc=com) INFO  lib389:ticket47900_test.py:139 Create a regular user entry cn=Joe Schmo,dc=example,dc=com with password (N0Special)... INFO  lib389:ticket47900_test.py:142 Succesfully added entry (cn=Joe Schmo,dc=example,dc=com) INFO  lib389:ticket47900_test.py:139 Create a regular user entry cn=Joe Schmo,dc=example,dc=com with password ({SSHA}bBy8UdtPZwu8uZna9QOYG3Pr41RpIRVDl8wddw==)... INFO  lib389:ticket47900_test.py:142 Succesfully added entry (cn=Joe Schmo,dc=example,dc=com) INFO  lib389:ticket47900_test.py:155 Deactivate Password Administator and try invalid password updates... INFO  lib389:ticket47900_test.py:177 Password update failed as expected: password (2_Short) result (Constraint violation) INFO  lib389:ticket47900_test.py:177 Password update failed as expected: password (No_Number) result (Constraint violation) INFO  lib389:ticket47900_test.py:177 Password update failed as expected: password (N0Special) result (Constraint violation) INFO  lib389:ticket47900_test.py:177 Password update failed as expected: password ({SSHA}bBy8UdtPZwu8uZna9QOYG3Pr41RpIRVDl8wddw==) result (Constraint violation) INFO  lib389:ticket47900_test.py:188 Activate Password Administator and try updates again... INFO  lib389:ticket47900_test.py:205 Password update succeeded (2_Short) INFO  lib389:ticket47900_test.py:205 Password update succeeded (No_Number) INFO  lib389:ticket47900_test.py:205 Password update succeeded (N0Special) INFO  lib389:ticket47900_test.py:205 Password update succeeded ({SSHA}bBy8UdtPZwu8uZna9QOYG3Pr41RpIRVDl8wddw==)
Passed tickets/ticket47910_test.py::test_ticket47910_logconv_start_end_positive 0.69
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.utils:ticket47910_test.py:36 Diable access log buffering INFO  lib389.utils:ticket47910_test.py:39 Do a ldapsearch operation INFO  lib389.utils:ticket47910_test.py:42 sleep for sometime so that access log file get generated
-------------------------------Captured log call--------------------------------
INFO  lib389.utils:ticket47910_test.py:79 Running test_ticket47910 - Execute logconv.pl -S -E with random values INFO  lib389.utils:ticket47910_test.py:81 taking current time with offset of 2 mins and formatting it to feed -S INFO  lib389.utils:ticket47910_test.py:85 taking current time with offset of 2 mins and formatting it to feed -E INFO  lib389.utils:ticket47910_test.py:89 Executing logconv.pl with -S and -E INFO  lib389.utils:ticket47910_test.py:61 Executing logconv.pl with -S current time and -E end time INFO  lib389.utils:ticket47910_test.py:63 /usr/bin/logconv.pl -S [28/Oct/2020:23:48:04] -E [28/Oct/2020:23:52:04] /var/log/dirsrv/slapd-standalone1/access INFO  lib389.utils:ticket47910_test.py:66 standard outputAccess Log Analyzer 8.2 Command: logconv.pl /var/log/dirsrv/slapd-standalone1/access Processing 1 Access Log(s)... [001] /var/log/dirsrv/slapd-standalone1/access size (bytes): 9327 Total Log Lines Analysed: 77 ----------- Access Log Output ------------ Start of Logs: 28/Oct/2020:23:48:04 End of Logs: 28/Oct/2020:23:50:03.548639549 Processed Log Time: 0 Hours, 1 Minutes, 59.54864128 Seconds Restarts: 2 Peak Concurrent Connections: 2 Total Operations: 35 Total Results: 33 Overall Performance: 94.3% Total Connections: 3 (0.03/sec) (1.51/min) - LDAP Connections: 1 (0.01/sec) (0.50/min) - LDAPI Connections: 2 (0.02/sec) (1.00/min) - LDAPS Connections: 0 (0.00/sec) (0.00/min) - StartTLS Extended Ops: 0 (0.00/sec) (0.00/min) Searches: 8 (0.07/sec) (4.02/min) Modifications: 4 (0.03/sec) (2.01/min) Adds: 18 (0.15/sec) (9.03/min) Deletes: 0 (0.00/sec) (0.00/min) Mod RDNs: 0 (0.00/sec) (0.00/min) Compares: 0 (0.00/sec) (0.00/min) Binds: 5 (0.04/sec) (2.51/min) Average wtime (wait time): 0.000191461 Average optime (op time): 0.009244228 Average etime (elapsed time): 0.009430478 Proxied Auth Operations: 0 Persistent Searches: 0 Internal Operations: 0 Entry Operations: 0 Extended Operations: 0 Abandoned Requests: 0 Smart Referrals Received: 0 VLV Operations: 0 VLV 
Unindexed Searches: 0 VLV Unindexed Components: 0 SORT Operations: 0 Entire Search Base Queries: 1 Paged Searches: 0 Unindexed Searches: 0 Unindexed Components: 1 Invalid Attribute Filters: 0 FDs Taken: 3 FDs Returned: 2 Highest FD Taken: 65 Broken Pipes: 0 Connections Reset By Peer: 0 Resource Unavailable: 0 Max BER Size Exceeded: 0 Binds: 5 Unbinds: 1 -------------------------------- - LDAP v2 Binds: 0 - LDAP v3 Binds: 3 - AUTOBINDs(LDAPI): 2 - SSL Client Binds: 0 - Failed SSL Client Binds: 0 - SASL Binds: 2 - EXTERNAL: 2 - Directory Manager Binds: 1 - Anonymous Binds: 0 Cleaning up temp files... Done. INFO  lib389.utils:ticket47910_test.py:67 standard errors
Passed tickets/ticket47910_test.py::test_ticket47910_logconv_start_end_negative 0.16
-------------------------------Captured log call--------------------------------
INFO  lib389.utils:ticket47910_test.py:105 Running test_ticket47910 - Execute logconv.pl -S -E with starttime>endtime INFO  lib389.utils:ticket47910_test.py:107 taking current time with offset of 2 mins and formatting it to feed -S INFO  lib389.utils:ticket47910_test.py:111 taking current time with offset of 2 mins and formatting it to feed -E INFO  lib389.utils:ticket47910_test.py:115 Executing logconv.pl with -S and -E INFO  lib389.utils:ticket47910_test.py:61 Executing logconv.pl with -S current time and -E end time INFO  lib389.utils:ticket47910_test.py:63 /usr/bin/logconv.pl -S [28/Oct/2020:23:52:05] -E [28/Oct/2020:23:48:05] /var/log/dirsrv/slapd-standalone1/access INFO  lib389.utils:ticket47910_test.py:66 standard outputAccess Log Analyzer 8.2 Command: logconv.pl /var/log/dirsrv/slapd-standalone1/access Start time ([28/Oct/2020:23:52:05]) is greater than end time ([28/Oct/2020:23:48:05])! Cleaning up temp files... Done. INFO  lib389.utils:ticket47910_test.py:67 standard errors
Passed tickets/ticket47910_test.py::test_ticket47910_logconv_start_end_invalid 0.14
-------------------------------Captured log call--------------------------------
INFO  lib389.utils:ticket47910_test.py:128 Running test_ticket47910 - Execute logconv.pl -S -E with invalid timestamp INFO  lib389.utils:ticket47910_test.py:129 Set start time and end time to invalid values INFO  lib389.utils:ticket47910_test.py:133 Executing logconv.pl with -S and -E INFO  lib389.utils:ticket47910_test.py:61 Executing logconv.pl with -S current time and -E end time INFO  lib389.utils:ticket47910_test.py:63 /usr/bin/logconv.pl -S invalid -E invalid /var/log/dirsrv/slapd-standalone1/access INFO  lib389.utils:ticket47910_test.py:66 standard outputAccess Log Analyzer 8.2 Command: logconv.pl /var/log/dirsrv/slapd-standalone1/access The date string (invalid) is invalid, exiting... Cleaning up temp files... Done. INFO  lib389.utils:ticket47910_test.py:67 standard errors
Passed tickets/ticket47910_test.py::test_ticket47910_logconv_noaccesslogs 0.14
-------------------------------Captured log call--------------------------------
INFO  lib389.utils:ticket47910_test.py:147 Running test_ticket47910 - Execute logconv.pl without access logs INFO  lib389.utils:ticket47910_test.py:149 taking current time with offset of 2 mins and formatting it to feed -S INFO  lib389.utils:ticket47910_test.py:152 Executing logconv.pl with -S current time INFO  lib389.utils:ticket47910_test.py:154 /usr/bin/logconv.pl -S [28/Oct/2020:23:48:05] INFO  lib389.utils:ticket47910_test.py:157 standard outputThere are no access logs specified, or the tool options have not been used correctly! Cleaning up temp files... Done. INFO  lib389.utils:ticket47910_test.py:158 standard errors
Passed tickets/ticket47920_test.py::test_ticket47920_init 0.28
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed tickets/ticket47920_test.py::test_ticket47920_mod_readentry_ctrl 0.01
------------------------------Captured stdout call------------------------------
['final description']
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket47920_test.py:65 ############################################### INFO  lib389:ticket47920_test.py:66 ####### INFO  lib389:ticket47920_test.py:67 ####### MOD: with a readentry control INFO  lib389:ticket47920_test.py:68 ####### INFO  lib389:ticket47920_test.py:69 ############################################### INFO  lib389:ticket47920_test.py:106 Check the initial value of the entry
Passed tickets/ticket47921_test.py::test_ticket47921 0.08
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket47921_test:ticket47921_test.py:81 Test complete
Passed tickets/ticket47927_test.py::test_ticket47927_init 4.56
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed tickets/ticket47927_test.py::test_ticket47927_one 0.04
-------------------------------Captured log call--------------------------------
CRITICAL tests.tickets.ticket47927_test:ticket47927_test.py:100 test_ticket47927_one: Failed (expected) to set the telephonenumber for cn=test_2,cn=enforced_container,dc=example,dc=com: Constraint violation CRITICAL tests.tickets.ticket47927_test:ticket47927_test.py:111 test_ticket47927_one: Failed (expected) to set the telephonenumber for cn=test_3,cn=excluded_container,dc=example,dc=com: Constraint violation
Passed tickets/ticket47927_test.py::test_ticket47927_two 4.32
No log output captured.
Passed tickets/ticket47927_test.py::test_ticket47927_three 0.17
-------------------------------Captured log call--------------------------------
CRITICAL tests.tickets.ticket47927_test:ticket47927_test.py:151 test_ticket47927_three: Failed (expected) to set the telephonenumber for cn=test_2,cn=enforced_container,dc=example,dc=com: Constraint violation CRITICAL tests.tickets.ticket47927_test:ticket47927_test.py:158 test_ticket47927_three: success to set the telephonenumber for cn=test_3,cn=excluded_container,dc=example,dc=com
Passed tickets/ticket47927_test.py::test_ticket47927_four 0.01
-------------------------------Captured log call--------------------------------
CRITICAL tests.tickets.ticket47927_test:ticket47927_test.py:176 test_ticket47927_four: success to set the telephonenumber for cn=test_3,cn=excluded_container,dc=example,dc=com CRITICAL tests.tickets.ticket47927_test:ticket47927_test.py:198 test_ticket47927_four: Failed (expected) to set the telephonenumber for cn=test_2,cn=enforced_container,dc=example,dc=com: Constraint violation
Passed tickets/ticket47927_test.py::test_ticket47927_five 4.15
No log output captured.
Passed tickets/ticket47927_test.py::test_ticket47927_six 0.05
-------------------------------Captured log call--------------------------------
CRITICAL tests.tickets.ticket47927_test:ticket47927_test.py:240 test_ticket47927_six: Failed (expected) to set the telephonenumber for cn=test_2,cn=enforced_container,dc=example,dc=com: Constraint violation CRITICAL tests.tickets.ticket47927_test:ticket47927_test.py:247 test_ticket47927_six: success to set the telephonenumber for cn=test_3,cn=excluded_container,dc=example,dc=com CRITICAL tests.tickets.ticket47927_test:ticket47927_test.py:256 test_ticket47927_six: success to set the telephonenumber for cn=test_4,cn=excluded_bis_container,dc=example,dc=com
Passed tickets/ticket47931_test.py::test_ticket47931 44.02
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
------------------------------Captured stderr call------------------------------
Exception in thread Thread-71: Traceback (most recent call last): File "/usr/lib64/python3.8/threading.py", line 932, in _bootstrap_inner self.run() File "/export/tests/tickets/ticket47931_test.py", line 36, in run conn.set_option(ldap.OPT_TIMEOUT, self.timeout) File "/usr/local/lib/python3.8/site-packages/lib389/__init__.py", line 180, in inner return f(*args, **kwargs) File "/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py", line 937, in set_option return self._ldap_call(self._l.set_option,option,invalue) File "/usr/local/lib64/python3.8/site-packages/ldap/ldapobject.py", line 362, in __getattr__ raise AttributeError('%s has no attribute %s' % ( AttributeError: DirSrv has no attribute '_l'
-------------------------------Captured log call--------------------------------
INFO  lib389:backend.py:80 List backend with suffix=dc=deadlock INFO  lib389:backend.py:290 Creating a local backend INFO  lib389:backend.py:76 List backend cn=deadlock,cn=ldbm database,cn=plugins,cn=config INFO  lib389:__init__.py:1713 Found entry dn: cn=deadlock,cn=ldbm database,cn=plugins,cn=config cn: deadlock nsslapd-cachememsize: 512000 nsslapd-cachesize: -1 nsslapd-directory: /var/lib/dirsrv/slapd-standalone1/db/deadlock nsslapd-dncachememsize: 16777216 nsslapd-readonly: off nsslapd-require-index: off nsslapd-require-internalop-index: off nsslapd-suffix: dc=deadlock objectClass: top objectClass: extensibleObject objectClass: nsBackendInstance INFO  lib389:mappingTree.py:154 Entry dn: cn="dc=deadlock",cn=mapping tree,cn=config cn: dc=deadlock nsslapd-backend: deadlock nsslapd-state: backend objectclass: top objectclass: extensibleObject objectclass: nsMappingTree INFO  lib389:__init__.py:1713 Found entry dn: cn=dc\3Ddeadlock,cn=mapping tree,cn=config cn: dc=deadlock nsslapd-backend: deadlock nsslapd-state: backend objectClass: top objectClass: extensibleObject objectClass: nsMappingTree INFO  tests.tickets.ticket47931_test:ticket47931_test.py:142 Adding members to the group... INFO  tests.tickets.ticket47931_test:ticket47931_test.py:158 Finished adding members to the group. INFO  tests.tickets.ticket47931_test:ticket47931_test.py:164 Test complete
Passed tickets/ticket47953_test.py::test_ticket47953 4.08
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  lib389:tasks.py:498 Import task import_10282020_235201 for file /var/lib/dirsrv/slapd-standalone1/ldif/ticket47953.ldif completed successfully
Passed tickets/ticket47963_test.py::test_ticket47963 8.30
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket47963_test:ticket47963_test.py:145 Test complete
Passed tickets/ticket47970_test.py::test_ticket47970 0.52
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed tickets/ticket47976_test.py::test_ticket47976_init 4.55
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed tickets/ticket47976_test.py::test_ticket47976_1 3.76
No log output captured.
Passed tickets/ticket47976_test.py::test_ticket47976_2 5.24
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket47976_test:ticket47976_test.py:99 Test complete INFO  tests.tickets.ticket47976_test:ticket47976_test.py:104 Export LDIF file... INFO  lib389:tasks.py:567 Export task export_10282020_235255 for file /var/lib/dirsrv/slapd-standalone1/ldif/export.ldif completed successfully INFO  tests.tickets.ticket47976_test:ticket47976_test.py:115 Import LDIF file... INFO  lib389:tasks.py:498 Import task import_10282020_235257 for file /var/lib/dirsrv/slapd-standalone1/ldif/export.ldif completed successfully
Passed tickets/ticket47976_test.py::test_ticket47976_3 0.02
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket47976_test:ticket47976_test.py:131 Testing if the delete will hang or not INFO  tests.tickets.ticket47976_test:ticket47976_test.py:150 user0 was correctly deleted INFO  tests.tickets.ticket47976_test:ticket47976_test.py:150 user1 was correctly deleted
Passed tickets/ticket47980_test.py::test_ticket47980 2.51
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed tickets/ticket47981_test.py::test_ticket47981 3.76
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  lib389:backend.py:80 List backend with suffix=o=netscaperoot INFO  lib389:backend.py:290 Creating a local backend INFO  lib389:backend.py:76 List backend cn=netscaperoot,cn=ldbm database,cn=plugins,cn=config INFO  lib389:__init__.py:1713 Found entry dn: cn=netscaperoot,cn=ldbm database,cn=plugins,cn=config cn: netscaperoot nsslapd-cachememsize: 512000 nsslapd-cachesize: -1 nsslapd-directory: /var/lib/dirsrv/slapd-standalone1/db/netscaperoot nsslapd-dncachememsize: 16777216 nsslapd-readonly: off nsslapd-require-index: off nsslapd-require-internalop-index: off nsslapd-suffix: o=netscaperoot objectClass: top objectClass: extensibleObject objectClass: nsBackendInstance INFO  lib389:mappingTree.py:154 Entry dn: cn="o=netscaperoot",cn=mapping tree,cn=config cn: o=netscaperoot nsslapd-backend: netscaperoot nsslapd-state: backend objectclass: top objectclass: extensibleObject objectclass: nsMappingTree INFO  lib389:__init__.py:1713 Found entry dn: cn=o\3Dnetscaperoot,cn=mapping tree,cn=config cn: o=netscaperoot nsslapd-backend: netscaperoot nsslapd-state: backend objectClass: top objectClass: extensibleObject objectClass: nsMappingTree
Passed tickets/ticket48005_test.py::test_ticket48005_memberof 12.73
------------------------------Captured stderr call------------------------------
ls: cannot access '/var/log/dirsrv/slapd-standalone1/core*': No such file or directory
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket48005_test:ticket48005_test.py:86 Ticket 48005 memberof test... INFO  lib389:tasks.py:877 fixupMemberOf task fixupmemberof_10282020_235424 for basedn dc=example,dc=com completed successfully INFO  tests.tickets.ticket48005_test:ticket48005_test.py:110 No core files are found INFO  tests.tickets.ticket48005_test:ticket48005_test.py:119 Ticket 48005 memberof test complete
Passed tickets/ticket48005_test.py::test_ticket48005_automember 20.06
------------------------------Captured stderr call------------------------------
ls: cannot access '/var/log/dirsrv/slapd-standalone1/core*': No such file or directory ls: cannot access '/var/log/dirsrv/slapd-standalone1/core*': No such file or directory ls: cannot access '/var/log/dirsrv/slapd-standalone1/core*': No such file or directory
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket48005_test:ticket48005_test.py:138 Ticket 48005 automember test... INFO  tests.tickets.ticket48005_test:ticket48005_test.py:143 Adding automember config INFO  lib389:tasks.py:986 Automember Rebuild Membership task(task-10282020_235437) completedsuccessfully INFO  tests.tickets.ticket48005_test:ticket48005_test.py:176 No core files are found INFO  lib389:tasks.py:1039 Automember Export Updates task (task-10282020_235440) completed successfully INFO  tests.tickets.ticket48005_test:ticket48005_test.py:198 No core files are found INFO  lib389:tasks.py:1087 Automember Map Updates task (task-10282020_235444) completed successfully INFO  tests.tickets.ticket48005_test:ticket48005_test.py:222 No core files are found INFO  tests.tickets.ticket48005_test:ticket48005_test.py:231 Ticket 48005 automember test complete
Passed tickets/ticket48005_test.py::test_ticket48005_syntaxvalidate 3.64
------------------------------Captured stderr call------------------------------
ls: cannot access '/var/log/dirsrv/slapd-standalone1/core*': No such file or directory
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket48005_test:ticket48005_test.py:241 Ticket 48005 syntax validate test... INFO  lib389:tasks.py:1255 Syntax Validate task (task-10282020_235452) completed successfully INFO  tests.tickets.ticket48005_test:ticket48005_test.py:261 No core files are found INFO  tests.tickets.ticket48005_test:ticket48005_test.py:265 Ticket 48005 syntax validate test complete
Passed tickets/ticket48005_test.py::test_ticket48005_usn 12.83
------------------------------Captured stderr call------------------------------
ls: cannot access '/var/log/dirsrv/slapd-standalone1/core*': No such file or directory
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket48005_test:ticket48005_test.py:277 Ticket 48005 usn test... INFO  tests.tickets.ticket48005_test:ticket48005_test.py:285 No user entries. INFO  lib389:tasks.py:1304 USN tombstone cleanup task (task-10282020_235501) completed successfully INFO  tests.tickets.ticket48005_test:ticket48005_test.py:316 No core files are found INFO  tests.tickets.ticket48005_test:ticket48005_test.py:324 Ticket 48005 usn test complete
Passed tickets/ticket48005_test.py::test_ticket48005_schemareload 3.58
------------------------------Captured stderr call------------------------------
ls: cannot access '/var/log/dirsrv/slapd-standalone1/core*': No such file or directory
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket48005_test:ticket48005_test.py:334 Ticket 48005 schema reload test... INFO  lib389:tasks.py:1169 Schema Reload task (task-10282020_235509) completed successfully INFO  tests.tickets.ticket48005_test:ticket48005_test.py:354 No core files are found INFO  tests.tickets.ticket48005_test:ticket48005_test.py:358 Ticket 48005 schema reload test complete
Passed tickets/ticket48026_test.py::test_ticket48026 4.67
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket48026_test:ticket48026_test.py:114 Test complete
Passed tickets/ticket48109_test.py::test_ticket48109 27.63
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket48109_test:ticket48109_test.py:32 Test case 0 INFO  tests.tickets.ticket48109_test:ticket48109_test.py:77 match: conn=1 op=3 INFO  tests.tickets.ticket48109_test:ticket48109_test.py:85 l1: [28/Oct/2020:23:55:56.792021911 -0400] conn=1 op=3 RESULT err=0 tag=101 nentries=1 wtime=0.000249492 optime=0.000335164 etime=0.000581844 INFO  tests.tickets.ticket48109_test:ticket48109_test.py:88 match: nentires=1 INFO  tests.tickets.ticket48109_test:ticket48109_test.py:93 Entry uid=a* found. INFO  tests.tickets.ticket48109_test:ticket48109_test.py:100 Test case 0 - OK - substr index used INFO  tests.tickets.ticket48109_test:ticket48109_test.py:119 Test case 1 INFO  tests.tickets.ticket48109_test:ticket48109_test.py:163 match: conn=1 op=3 INFO  tests.tickets.ticket48109_test:ticket48109_test.py:171 l1: [28/Oct/2020:23:55:56.792021911 -0400] conn=1 op=3 RESULT err=0 tag=101 nentries=1 wtime=0.000249492 optime=0.000335164 etime=0.000581844 INFO  tests.tickets.ticket48109_test:ticket48109_test.py:174 match: nentires=1 INFO  tests.tickets.ticket48109_test:ticket48109_test.py:179 Entry uid=*b found. INFO  tests.tickets.ticket48109_test:ticket48109_test.py:186 Test case 1 - OK - substr index used INFO  tests.tickets.ticket48109_test:ticket48109_test.py:208 Test case 2 INFO  tests.tickets.ticket48109_test:ticket48109_test.py:259 match: conn=1 op=3 INFO  tests.tickets.ticket48109_test:ticket48109_test.py:267 l1: [28/Oct/2020:23:55:56.792021911 -0400] conn=1 op=3 RESULT err=0 tag=101 nentries=1 wtime=0.000249492 optime=0.000335164 etime=0.000581844 INFO  tests.tickets.ticket48109_test:ticket48109_test.py:270 match: nentires=1 INFO  tests.tickets.ticket48109_test:ticket48109_test.py:275 Entry uid=c* found. 
INFO  tests.tickets.ticket48109_test:ticket48109_test.py:282 Test case 2-1 - OK - correct substr index used INFO  tests.tickets.ticket48109_test:ticket48109_test.py:294 match: conn=1 op=4 INFO  tests.tickets.ticket48109_test:ticket48109_test.py:302 l1: [28/Oct/2020:23:56:15.280232289 -0400] conn=1 op=4 RESULT err=0 tag=101 nentries=1 wtime=0.000252158 optime=0.000373227 etime=0.000622209 INFO  tests.tickets.ticket48109_test:ticket48109_test.py:305 match: nentires=1 INFO  tests.tickets.ticket48109_test:ticket48109_test.py:310 Entry uid=*2 found. INFO  tests.tickets.ticket48109_test:ticket48109_test.py:317 Test case 2-2 - OK - correct substr index used INFO  tests.tickets.ticket48109_test:ticket48109_test.py:331 Testcase PASSED
Passed tickets/ticket48170_test.py::test_ticket48170 0.01
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket48170_test:ticket48170_test.py:29 Index update correctly rejected INFO  tests.tickets.ticket48170_test:ticket48170_test.py:36 Test complete
Passed tickets/ticket48194_test.py::test_init 7.33
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket48194_test.py:40 ############################################### INFO  lib389:ticket48194_test.py:41 ####### Testing Ticket 48194 - harden the list of ciphers available by default INFO  lib389:ticket48194_test.py:42 ############################################### INFO  lib389.utils:ticket48194_test.py:57 ######################### enable SSL in the directory server with all ciphers ######################
Passed tickets/ticket48194_test.py::test_run_0 6.12
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket48194_test.py:40 ############################################### INFO  lib389:ticket48194_test.py:41 ####### Test Case 1 - Check the ciphers availability for "+all"; allowWeakCipher: on INFO  lib389:ticket48194_test.py:42 ############################################### INFO  lib389.utils:ticket48194_test.py:131 ######################### Restarting the server ###################### INFO  lib389.utils:ticket48194_test.py:86 Testing DES-CBC3-SHA -- expect to handshake successfully INFO  lib389.utils:ticket48194_test.py:92 Running cmdline: /usr/bin/openssl s_client -connect localhost:63601 -cipher DES-CBC3-SHA INFO  lib389.utils:ticket48194_test.py:105 Found: b'New, TLSv1.3, Cipher is TLS_AES_128_GCM_SHA256\n' INFO  lib389.utils:ticket48194_test.py:86 Testing AES256-SHA256 -- expect to handshake successfully INFO  lib389.utils:ticket48194_test.py:92 Running cmdline: /usr/bin/openssl s_client -connect localhost:63601 -cipher AES256-SHA256 INFO  lib389.utils:ticket48194_test.py:105 Found: b'New, TLSv1.3, Cipher is TLS_AES_128_GCM_SHA256\n'
Passed tickets/ticket48194_test.py::test_run_3 5.44
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket48194_test.py:40 ############################################### INFO  lib389:ticket48194_test.py:41 ####### Test Case 4 - Check the ciphers availability for "-all" INFO  lib389:ticket48194_test.py:42 ############################################### INFO  lib389.utils:ticket48194_test.py:199 ######################### Restarting the server ###################### INFO  lib389.utils:ticket48194_test.py:86 Testing DES-CBC3-SHA -- expect to handshake failed INFO  lib389.utils:ticket48194_test.py:92 Running cmdline: /usr/bin/openssl s_client -connect localhost:63601 -cipher DES-CBC3-SHA INFO  lib389.utils:ticket48194_test.py:105 Found: b'New, (NONE), Cipher is (NONE)\n' INFO  lib389.utils:ticket48194_test.py:86 Testing AES256-SHA256 -- expect to handshake failed INFO  lib389.utils:ticket48194_test.py:92 Running cmdline: /usr/bin/openssl s_client -connect localhost:63601 -cipher AES256-SHA256 INFO  lib389.utils:ticket48194_test.py:105 Found: b'New, (NONE), Cipher is (NONE)\n'
Passed tickets/ticket48194_test.py::test_run_9 6.60
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket48194_test.py:40 ############################################### INFO  lib389:ticket48194_test.py:41 ####### Test Case 10 - Check no nsSSL3Ciphers (default setting) with no errorlog-level & allowWeakCipher on INFO  lib389:ticket48194_test.py:42 ############################################### INFO  lib389.utils:ticket48194_test.py:316 ######################### Restarting the server ###################### INFO  lib389.utils:ticket48194_test.py:86 Testing DES-CBC3-SHA -- expect to handshake successfully INFO  lib389.utils:ticket48194_test.py:92 Running cmdline: /usr/bin/openssl s_client -connect localhost:63601 -cipher DES-CBC3-SHA INFO  lib389.utils:ticket48194_test.py:105 Found: b'New, TLSv1.3, Cipher is TLS_AES_128_GCM_SHA256\n' INFO  lib389.utils:ticket48194_test.py:86 Testing AES256-SHA256 -- expect to handshake successfully INFO  lib389.utils:ticket48194_test.py:92 Running cmdline: /usr/bin/openssl s_client -connect localhost:63601 -cipher AES256-SHA256 INFO  lib389.utils:ticket48194_test.py:105 Found: b'New, TLSv1.3, Cipher is TLS_AES_128_GCM_SHA256\n'
Passed tickets/ticket48194_test.py::test_run_11 5.46
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket48194_test.py:40 ############################################### INFO  lib389:ticket48194_test.py:41 ####### Test Case 12 - Check nsSSL3Ciphers: +fortezza, which is not supported INFO  lib389:ticket48194_test.py:42 ############################################### INFO  lib389.utils:ticket48194_test.py:337 ######################### Restarting the server ###################### INFO  lib389.utils:ticket48194_test.py:86 Testing DES-CBC3-SHA -- expect to handshake failed INFO  lib389.utils:ticket48194_test.py:92 Running cmdline: /usr/bin/openssl s_client -connect localhost:63601 -cipher DES-CBC3-SHA INFO  lib389.utils:ticket48194_test.py:105 Found: b'New, (NONE), Cipher is (NONE)\n' INFO  lib389.utils:ticket48194_test.py:86 Testing AES256-SHA256 -- expect to handshake failed INFO  lib389.utils:ticket48194_test.py:92 Running cmdline: /usr/bin/openssl s_client -connect localhost:63601 -cipher AES256-SHA256 INFO  lib389.utils:ticket48194_test.py:105 Found: b'New, (NONE), Cipher is (NONE)\n'
Passed tickets/ticket48212_test.py::test_ticket48212 12.40
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
------------------------------Captured stderr call------------------------------
/bin/sh: /usr/sbin/dbverify: No such file or directory /bin/sh: /usr/sbin/dbverify: No such file or directory /bin/sh: /usr/sbin/dbverify: No such file or directory /bin/sh: /usr/sbin/dbverify: No such file or directory
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket48212_test.py:70 Bind as cn=Directory Manager INFO  lib389:ticket48212_test.py:83 ######################### Import Test data (/var/lib/dirsrv/slapd-standalone1/ldif/example1k_posix.ldif) ###################### INFO  lib389:tasks.py:498 Import task import_10282020_235808 for file /var/lib/dirsrv/slapd-standalone1/ldif/example1k_posix.ldif completed successfully INFO  lib389:ticket48212_test.py:19 +++++ dbverify +++++ INFO  lib389:ticket48212_test.py:23 Running /usr/sbin/dbverify -Z standalone1 -V INFO  lib389:ticket48212_test.py:43 dbverify passed INFO  lib389:ticket48212_test.py:92 ######################### Add index by uidnumber ###################### INFO  lib389:ticket48212_test.py:101 ######################### reindexing... ###################### INFO  lib389:ticket48212_test.py:47 +++++ reindex uidnumber +++++ INFO  lib389:tasks.py:798 Index task index_attrs_10282020_235811 completed successfully INFO  lib389:ticket48212_test.py:19 +++++ dbverify +++++ INFO  lib389:ticket48212_test.py:23 Running /usr/sbin/dbverify -Z standalone1 -V INFO  lib389:ticket48212_test.py:43 dbverify passed INFO  lib389:ticket48212_test.py:106 ######################### Add nsMatchingRule ###################### INFO  lib389:ticket48212_test.py:112 ######################### reindexing... 
###################### INFO  lib389:ticket48212_test.py:47 +++++ reindex uidnumber +++++ INFO  lib389:tasks.py:798 Index task index_attrs_10282020_235814 completed successfully INFO  lib389:ticket48212_test.py:19 +++++ dbverify +++++ INFO  lib389:ticket48212_test.py:23 Running /usr/sbin/dbverify -Z standalone1 -V INFO  lib389:ticket48212_test.py:43 dbverify passed INFO  lib389:ticket48212_test.py:117 ######################### Delete nsMatchingRule ###################### INFO  lib389:ticket48212_test.py:47 +++++ reindex uidnumber +++++ INFO  lib389:tasks.py:798 Index task index_attrs_10282020_235817 completed successfully INFO  lib389:ticket48212_test.py:19 +++++ dbverify +++++ INFO  lib389:ticket48212_test.py:23 Running /usr/sbin/dbverify -Z standalone1 -V INFO  lib389:ticket48212_test.py:43 dbverify passed
Passed tickets/ticket48214_test.py::test_ticket48214_run 0.47
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket48214_test.py:83 Bind as cn=Directory Manager INFO  lib389:ticket48214_test.py:86 ######################### Out of Box ###################### INFO  lib389:ticket48214_test.py:44 +++++ Check Max Ber Size +++++ INFO  lib389:ticket48214_test.py:18 +++++ Get maxbersize from dse.ldif +++++ INFO  lib389:ticket48214_test.py:21 Run CMD: egrep nsslapd-maxbersize /etc/dirsrv/slapd-standalone1/dse.ldif INFO  lib389:ticket48214_test.py:28 Empty: INFO  lib389:ticket48214_test.py:50 No nsslapd-maxbersize found in dse.ldif INFO  lib389:ticket48214_test.py:63 ldapsearch returned nsslapd-maxbersize: b'2097152' INFO  lib389:ticket48214_test.py:72 Checking 2097152 vs 2097152 INFO  lib389:ticket48214_test.py:89 ######################### Add nsslapd-maxbersize: 0 ###################### INFO  lib389:ticket48214_test.py:44 +++++ Check Max Ber Size +++++ INFO  lib389:ticket48214_test.py:18 +++++ Get maxbersize from dse.ldif +++++ INFO  lib389:ticket48214_test.py:21 Run CMD: egrep nsslapd-maxbersize /etc/dirsrv/slapd-standalone1/dse.ldif INFO  lib389:ticket48214_test.py:35 Right format - nsslapd-maxbersize: 0 INFO  lib389:ticket48214_test.py:52 nsslapd-maxbersize: 0 INFO  lib389:ticket48214_test.py:63 ldapsearch returned nsslapd-maxbersize: b'2097152' INFO  lib389:ticket48214_test.py:72 Checking 2097152 vs 2097152 INFO  lib389:ticket48214_test.py:93 ######################### Add nsslapd-maxbersize: 10000 ###################### INFO  lib389:ticket48214_test.py:44 +++++ Check Max Ber Size +++++ INFO  lib389:ticket48214_test.py:18 +++++ Get maxbersize from dse.ldif +++++ INFO  lib389:ticket48214_test.py:21 Run CMD: egrep nsslapd-maxbersize /etc/dirsrv/slapd-standalone1/dse.ldif INFO  lib389:ticket48214_test.py:35 Right format - nsslapd-maxbersize: 10000 INFO  lib389:ticket48214_test.py:55 nsslapd-maxbersize: 10000 INFO  lib389:ticket48214_test.py:63 ldapsearch returned nsslapd-maxbersize: b'10000' INFO  lib389:ticket48214_test.py:98 ticket48214 was successfully verified.
Passed tickets/ticket48233_test.py::test_ticket48233 5.50
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket48233_test:ticket48233_test.py:54 Test complete
Passed tickets/ticket48252_test.py::test_ticket48252_setup 0.40
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed tickets/ticket48252_test.py::test_ticket48252_run_0 11.61
------------------------------Captured stderr call------------------------------
[28/Oct/2020:23:59:30.929818221 -0400] - INFO - slapd_exemode_db2index - Backend Instance: userRoot [28/Oct/2020:23:59:30.936077776 -0400] - INFO - ldbm_instance_config_cachememsize_set - force a minimal value 512000 [28/Oct/2020:23:59:30.943952937 -0400] - INFO - bdb_instance_start - Import is running with nsslapd-db-private-import-mem on; No other process is allowed to access the database [28/Oct/2020:23:59:30.947195590 -0400] - INFO - check_and_set_import_cache - pagesize: 4096, available bytes 7474622464, process usage 23134208 [28/Oct/2020:23:59:30.950239150 -0400] - INFO - check_and_set_import_cache - Import allocates 2919774KB import cache. [28/Oct/2020:23:59:31.224314992 -0400] - INFO - bdb_db2index - userRoot: Indexing attribute: cn [28/Oct/2020:23:59:31.227717480 -0400] - ERR - libdb - BDB1566 txn_checkpoint interface requires an environment configured for the transaction subsystem [28/Oct/2020:23:59:31.230342217 -0400] - ERR - bdb_force_checkpoint - Checkpoint FAILED, error Invalid argument (22) [28/Oct/2020:23:59:31.238128916 -0400] - INFO - bdb_db2index - userRoot: Finished indexing. [28/Oct/2020:23:59:31.264844362 -0400] - INFO - bdb_pre_close - All database threads now stopped
-------------------------------Captured log call--------------------------------
INFO  lib389:__init__.py:3014 Running script: ['/usr/bin/dbscan', '-f', '/var/lib/dirsrv/slapd-standalone1/db/userRoot/cn.db'] INFO  lib389:ticket48252_test.py:63 Did not found key test_user0 in dbscan output INFO  lib389:__init__.py:3014 Running script: ['/usr/bin/dbscan', '-f', '/var/lib/dirsrv/slapd-standalone1/db/userRoot/cn.db'] INFO  lib389:ticket48252_test.py:63 Did not found key test_user0 in dbscan output
Passed tickets/ticket48252_test.py::test_ticket48252_run_1 4.43
------------------------------Captured stderr call------------------------------
[28/Oct/2020:23:59:39.065328085 -0400] - INFO - slapd_exemode_db2index - Backend Instance: userRoot [28/Oct/2020:23:59:39.112284298 -0400] - INFO - ldbm_instance_config_cachememsize_set - force a minimal value 512000 [28/Oct/2020:23:59:39.120682616 -0400] - INFO - bdb_instance_start - Import is running with nsslapd-db-private-import-mem on; No other process is allowed to access the database [28/Oct/2020:23:59:39.125871654 -0400] - INFO - check_and_set_import_cache - pagesize: 4096, available bytes 7473991680, process usage 23166976 [28/Oct/2020:23:59:39.129067365 -0400] - INFO - check_and_set_import_cache - Import allocates 2919528KB import cache. [28/Oct/2020:23:59:39.387399907 -0400] - INFO - bdb_db2index - userRoot: Indexing attribute: objectclass [28/Oct/2020:23:59:39.390673452 -0400] - ERR - libdb - BDB1566 txn_checkpoint interface requires an environment configured for the transaction subsystem [28/Oct/2020:23:59:39.393811128 -0400] - ERR - bdb_force_checkpoint - Checkpoint FAILED, error Invalid argument (22) [28/Oct/2020:23:59:39.402421227 -0400] - INFO - bdb_db2index - userRoot: Finished indexing. [28/Oct/2020:23:59:39.427687798 -0400] - INFO - bdb_pre_close - All database threads now stopped
Passed tickets/ticket48265_test.py::test_ticket48265_test 0.68
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket48265_test:ticket48265_test.py:34 Adding 20 test entries... INFO  tests.tickets.ticket48265_test:ticket48265_test.py:51 Search with Ticket 47521 type complex filter INFO  tests.tickets.ticket48265_test:ticket48265_test.py:60 Search with Ticket 48265 type complex filter INFO  tests.tickets.ticket48265_test:ticket48265_test.py:69 Test 48265 complete
Passed tickets/ticket48266_test.py::test_ticket48266_fractional 19.56
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 9893a161-2609-4aa6-b610-a163eb2a99ee / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working 
(expect b718d5e0-e4e7-4fb4-9036-ad904b54076d / got description=9893a161-2609-4aa6-b610-a163eb2a99ee) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists
-------------------------------Captured log call--------------------------------
INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 173b1a04-1bce-4f49-95ae-b2b202a191ab / got description=b718d5e0-e4e7-4fb4-9036-ad904b54076d) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 173b1a04-1bce-4f49-95ae-b2b202a191ab / got description=b718d5e0-e4e7-4fb4-9036-ad904b54076d) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 173b1a04-1bce-4f49-95ae-b2b202a191ab / got description=b718d5e0-e4e7-4fb4-9036-ad904b54076d) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 173b1a04-1bce-4f49-95ae-b2b202a191ab / got description=b718d5e0-e4e7-4fb4-9036-ad904b54076d) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 173b1a04-1bce-4f49-95ae-b2b202a191ab / got description=b718d5e0-e4e7-4fb4-9036-ad904b54076d) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 173b1a04-1bce-4f49-95ae-b2b202a191ab / got 
description=b718d5e0-e4e7-4fb4-9036-ad904b54076d) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 173b1a04-1bce-4f49-95ae-b2b202a191ab / got description=b718d5e0-e4e7-4fb4-9036-ad904b54076d) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working
Passed tickets/ticket48266_test.py::test_ticket48266_check_repl_desc 1.12
No log output captured.
Passed tickets/ticket48270_test.py::test_ticket48270_init 0.62
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket48270_test:ticket48270_test.py:26 Initialization: add dummy entries for the tests
Passed tickets/ticket48270_test.py::test_ticket48270_homeDirectory_indexed_cis 2.02
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket48270_test:ticket48270_test.py:39 index homeDirectory in caseIgnoreIA5Match and caseExactIA5Match INFO  tests.tickets.ticket48270_test:ticket48270_test.py:57 successfully checked that filter with exact mr , a filter with lowercase eq is failing INFO  lib389:tasks.py:798 Index task index_attrs_10292020_000057 completed successfully INFO  tests.tickets.ticket48270_test:ticket48270_test.py:63 Check indexing succeeded with a specified matching rule
Passed tickets/ticket48270_test.py::test_ticket48270_homeDirectory_mixed_value 0.01
No log output captured.
Passed tickets/ticket48270_test.py::test_ticket48270_extensible_search 0.00
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket48270_test:ticket48270_test.py:91 Default: can retrieve an entry filter syntax with exact stored value INFO  tests.tickets.ticket48270_test:ticket48270_test.py:93 Default: can retrieve an entry filter caseExactIA5Match with exact stored value INFO  tests.tickets.ticket48270_test:ticket48270_test.py:97 Default: can not retrieve an entry filter syntax match with lowered stored value INFO  tests.tickets.ticket48270_test:ticket48270_test.py:103 Default: can not retrieve an entry filter caseExactIA5Match with lowered stored value INFO  tests.tickets.ticket48270_test:ticket48270_test.py:110 Default: can retrieve an entry filter caseIgnoreIA5Match with lowered stored value
Passed tickets/ticket48272_test.py::test_ticket48272 9.38
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket48272_test:ticket48272_test.py:129 Test PASSED
Passed tickets/ticket48294_test.py::test_48294_init 0.56
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket48294_test.py:31 ############################################### INFO  lib389:ticket48294_test.py:32 ####### Testing Ticket 48294 - Linked Attributes plug-in - won't update links after MODRDN operation INFO  lib389:ticket48294_test.py:33 ###############################################
Passed tickets/ticket48294_test.py::test_48294_run_0 0.01
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket48294_test.py:31 ############################################### INFO  lib389:ticket48294_test.py:32 ####### Case 0 - Rename employee1 and adjust the link type value by replace INFO  lib389:ticket48294_test.py:33 ############################################### INFO  lib389:ticket48294_test.py:59 ######################### MODRDN uid=employee2 ######################
Passed tickets/ticket48294_test.py::test_48294_run_1 0.02
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket48294_test.py:31 ############################################### INFO  lib389:ticket48294_test.py:32 ####### Case 1 - Rename employee2 and adjust the link type value by delete and add INFO  lib389:ticket48294_test.py:33 ############################################### INFO  lib389:ticket48294_test.py:59 ######################### MODRDN uid=employee3 ######################
Passed tickets/ticket48294_test.py::test_48294_run_2 0.01
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket48294_test.py:31 ############################################### INFO  lib389:ticket48294_test.py:32 ####### Case 2 - Rename manager1 to manager2 and make sure the managed attribute value is updated INFO  lib389:ticket48294_test.py:33 ############################################### INFO  lib389:ticket48294_test.py:59 ######################### MODRDN uid=manager2 ######################
Passed tickets/ticket48295_test.py::test_48295_init 0.54
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket48295_test.py:30 ############################################### INFO  lib389:ticket48295_test.py:31 ####### Testing Ticket 48295 - Entry cache is not rolled back -- Linked Attributes plug-in - wrong behaviour when adding valid and broken links INFO  lib389:ticket48295_test.py:32 ###############################################
Passed tickets/ticket48295_test.py::test_48295_run 0.04
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket48295_test.py:30 ############################################### INFO  lib389:ticket48295_test.py:31 ####### Add 2 linktypes to manager1 - one exists, another does not to make sure the managed entry does not have managed type. INFO  lib389:ticket48295_test.py:32 ###############################################
Passed tickets/ticket48312_test.py::test_ticket48312 0.35
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket48312_test:ticket48312_test.py:117 Test complete
Passed tickets/ticket48354_test.py::test_ticket48354 0.01
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket48354_test:ticket48354_test.py:50 Test PASSED
Passed tickets/ticket48362_test.py::test_ticket48362 92.50
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect e31c1558-3856-41d2-b8bf-d55cc3289c4f / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working 
(expect ee55cce0-2935-4fa3-a249-9ba459f79553 / got description=e31c1558-3856-41d2-b8bf-d55cc3289c4f) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket48362_test:ticket48362_test.py:28 Add dna plugin config entry...ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 INFO  tests.tickets.ticket48362_test:ticket48362_test.py:48 Enable the DNA plugin... INFO  tests.tickets.ticket48362_test:ticket48362_test.py:55 Restarting the server... INFO  tests.tickets.ticket48362_test:ticket48362_test.py:28 Add dna plugin config entry...ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  tests.tickets.ticket48362_test:ticket48362_test.py:48 Enable the DNA plugin... INFO  tests.tickets.ticket48362_test:ticket48362_test.py:55 Restarting the server... INFO  tests.tickets.ticket48362_test:ticket48362_test.py:83 ======================== Update dnaPortNum=39001 ============================ INFO  tests.tickets.ticket48362_test:ticket48362_test.py:90 ======================== Update done INFO  tests.tickets.ticket48362_test:ticket48362_test.py:83 ======================== Update dnaPortNum=39002 ============================ INFO  tests.tickets.ticket48362_test:ticket48362_test.py:90 ======================== Update done INFO  tests.tickets.ticket48362_test:ticket48362_test.py:132 ======================== BEFORE RESTART ============================ INFO  tests.tickets.ticket48362_test:ticket48362_test.py:135 ======================== BEFORE RESTART ============================ INFO  tests.tickets.ticket48362_test:ticket48362_test.py:141 ======================== BEFORE RESTART ============================ INFO  tests.tickets.ticket48362_test:ticket48362_test.py:150 =================== AFTER RESTART ================================= INFO  tests.tickets.ticket48362_test:ticket48362_test.py:153 =================== AFTER RESTART ================================= INFO  tests.tickets.ticket48362_test:ticket48362_test.py:159 =================== AFTER RESTART ================================= INFO  tests.tickets.ticket48362_test:ticket48362_test.py:162 Test complete
Passed tickets/ticket48366_test.py::test_ticket48366_init 0.17
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket48366_test.py:44 Add subtree: ou=green,dc=example,dc=com INFO  lib389:ticket48366_test.py:48 Add subtree: ou=red,dc=example,dc=com INFO  lib389:ticket48366_test.py:54 Add cn=test,ou=people,dc=example,dc=com INFO  lib389:ticket48366_test.py:60 Add cn=proxy,ou=people,dc=example,dc=com INFO  lib389.utils:ticket48366_test.py:90 Adding %d test entries...
Passed tickets/ticket48366_test.py::test_ticket48366_search_user 0.06
No log output captured.
Passed tickets/ticket48366_test.py::test_ticket48366_search_dm 0.03
No log output captured.
Passed tickets/ticket48370_test.py::test_ticket48370 1.03
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket48370_test:ticket48370_test.py:187 Test PASSED
Passed tickets/ticket48383_test.py::test_ticket48383 45.51
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
------------------------------Captured stderr call------------------------------
ldiffile: /var/lib/dirsrv/slapd-standalone1/ldif/standalone1.ldif
-------------------------------Captured log call--------------------------------
CRITICAL tests.tickets.ticket48383_test:ticket48383_test.py:62 Failed to change nsslapd-cachememsize No such object INFO  tests.tickets.ticket48383_test:ticket48383_test.py:88 Test complete
Passed tickets/ticket48497_test.py::test_ticket48497_init 1.35
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket48497_test:ticket48497_test.py:26 Initialization: add dummy entries for the tests
Passed tickets/ticket48497_test.py::test_ticket48497_homeDirectory_mixed_value 0.00
No log output captured.
Passed tickets/ticket48497_test.py::test_ticket48497_extensible_search 0.00
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket48497_test:ticket48497_test.py:49 Default: can retrieve an entry filter syntax with exact stored value INFO  tests.tickets.ticket48497_test:ticket48497_test.py:51 Default: can retrieve an entry filter caseExactIA5Match with exact stored value INFO  tests.tickets.ticket48497_test:ticket48497_test.py:55 Default: can not retrieve an entry filter syntax match with lowered stored value INFO  tests.tickets.ticket48497_test:ticket48497_test.py:61 Default: can not retrieve an entry filter caseExactIA5Match with lowered stored value INFO  tests.tickets.ticket48497_test:ticket48497_test.py:68 Default: can retrieve an entry filter caseIgnoreIA5Match with lowered stored value
Passed tickets/ticket48497_test.py::test_ticket48497_homeDirectory_index_cfg 0.01
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket48497_test:ticket48497_test.py:73 index homeDirectory in caseIgnoreIA5Match and caseExactIA5Match
Passed tickets/ticket48497_test.py::test_ticket48497_homeDirectory_index_run 2.02
-------------------------------Captured log call--------------------------------
INFO  lib389:tasks.py:798 Index task index_attrs_10292020_000850 completed successfully INFO  tests.tickets.ticket48497_test:ticket48497_test.py:93 Check indexing succeeded with a specified matching rule
Passed tickets/ticket48665_test.py::test_ticket48665 0.30
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket48665_test:ticket48665_test.py:40 8 entries are returned from the server. CRITICAL tests.tickets.ticket48665_test:ticket48665_test.py:47 Failed to change nsslapd-cachememsize No such object INFO  tests.tickets.ticket48665_test:ticket48665_test.py:52 8 entries are returned from the server. INFO  tests.tickets.ticket48665_test:ticket48665_test.py:63 8 entries are returned from the server. INFO  tests.tickets.ticket48665_test:ticket48665_test.py:65 Test complete
Passed tickets/ticket48745_test.py::test_ticket48745_init 0.64
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket48745_test:ticket48745_test.py:26 Initialization: add dummy entries for the tests
Passed tickets/ticket48745_test.py::test_ticket48745_homeDirectory_indexed_cis 2.03
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket48745_test:ticket48745_test.py:39 index homeDirectory in caseIgnoreIA5Match and caseExactIA5Match INFO  tests.tickets.ticket48745_test:ticket48745_test.py:57 successfully checked that filter with exact mr , a filter with lowercase eq is failing INFO  lib389:tasks.py:798 Index task index_attrs_10292020_000932 completed successfully INFO  tests.tickets.ticket48745_test:ticket48745_test.py:63 Check indexing succeeded with a specified matching rule
Passed tickets/ticket48745_test.py::test_ticket48745_homeDirectory_mixed_value 0.01
No log output captured.
Passed tickets/ticket48745_test.py::test_ticket48745_extensible_search_after_index 0.01
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket48745_test:ticket48745_test.py:91 Default: can retrieve an entry filter syntax with exact stored value INFO  tests.tickets.ticket48745_test:ticket48745_test.py:99 Default: can retrieve an entry filter caseExactIA5Match with exact stored value INFO  tests.tickets.ticket48745_test:ticket48745_test.py:106 Default: can not retrieve an entry filter syntax match with lowered stored value INFO  tests.tickets.ticket48745_test:ticket48745_test.py:112 Default: can not retrieve an entry filter caseExactIA5Match with lowered stored value INFO  tests.tickets.ticket48745_test:ticket48745_test.py:119 Default: can retrieve an entry filter caseIgnoreIA5Match with lowered stored value
Passed tickets/ticket48746_test.py::test_ticket48746_init 0.87
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket48746_test:ticket48746_test.py:26 Initialization: add dummy entries for the tests
Passed tickets/ticket48746_test.py::test_ticket48746_homeDirectory_indexed_cis 2.02
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket48746_test:ticket48746_test.py:39 index homeDirectory in caseIgnoreIA5Match and caseExactIA5Match INFO  tests.tickets.ticket48746_test:ticket48746_test.py:57 successfully checked that filter with exact mr , a filter with lowercase eq is failing INFO  lib389:tasks.py:798 Index task index_attrs_10292020_000947 completed successfully INFO  tests.tickets.ticket48746_test:ticket48746_test.py:63 Check indexing succeeded with a specified matching rule
Passed tickets/ticket48746_test.py::test_ticket48746_homeDirectory_mixed_value 0.00
No log output captured.
Passed tickets/ticket48746_test.py::test_ticket48746_extensible_search_after_index 0.00
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket48746_test:ticket48746_test.py:99 Default: can retrieve an entry filter caseExactIA5Match with exact stored value
Passed tickets/ticket48746_test.py::test_ticket48746_homeDirectory_indexed_ces 2.02
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket48746_test:ticket48746_test.py:104 index homeDirectory in caseExactIA5Match, this would trigger the crash INFO  tests.tickets.ticket48746_test:ticket48746_test.py:121 successfully checked that filter with exact mr , a filter with lowercase eq is failing INFO  lib389:tasks.py:798 Index task index_attrs_10292020_000949 completed successfully INFO  tests.tickets.ticket48746_test:ticket48746_test.py:127 Check indexing succeeded with a specified matching rule
Passed tickets/ticket48759_test.py::test_ticket48759 20.91
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket48759_test.py:66 !!!!!!! uid=member2,dc=example,dc=com: memberof->b'cn=group,dc=example,dc=com' INFO  lib389:ticket48759_test.py:66 !!!!!!! uid=member2,dc=example,dc=com: memberof->b'cn=group,dc=example,dc=com' INFO  lib389:ticket48759_test.py:66 !!!!!!! uid=member2,dc=example,dc=com: memberof->b'cn=group,dc=example,dc=com'
Passed tickets/ticket48799_test.py::test_ticket48799 15.50
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for consumer1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39201, 'ldap-secureport': 63901, 'server-id': 'consumer1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:169 Joining consumer consumer1 from master1 ... INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 1801780b-7403-475e-850d-b68afeb94c25 / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is working INFO  lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 INFO  lib389.topologies:topologies.py:174 Ensuring consumer consumer1 from master1 ... 
INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 already exists
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket48799_test:ticket48799_test.py:80 Test complete
Passed tickets/ticket48844_test.py::test_ticket48844_init 1.39
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  lib389:backend.py:80 List backend with suffix=dc=bitwise,dc=com INFO  lib389:backend.py:290 Creating a local backend INFO  lib389:backend.py:76 List backend cn=TestBitw,cn=ldbm database,cn=plugins,cn=config INFO  lib389:__init__.py:1713 Found entry dn: cn=TestBitw,cn=ldbm database,cn=plugins,cn=config cn: TestBitw nsslapd-cachememsize: 512000 nsslapd-cachesize: -1 nsslapd-directory: /var/lib/dirsrv/slapd-standalone1/db/TestBitw nsslapd-dncachememsize: 16777216 nsslapd-readonly: off nsslapd-require-index: off nsslapd-require-internalop-index: off nsslapd-suffix: dc=bitwise,dc=com objectClass: top objectClass: extensibleObject objectClass: nsBackendInstance INFO  lib389:mappingTree.py:154 Entry dn: cn="dc=bitwise,dc=com",cn=mapping tree,cn=config cn: dc=bitwise,dc=com nsslapd-backend: TestBitw nsslapd-state: backend objectclass: top objectclass: extensibleObject objectclass: nsMappingTree INFO  lib389:__init__.py:1713 Found entry dn: cn=dc\3Dbitwise\2Cdc\3Dcom,cn=mapping tree,cn=config cn: dc=bitwise,dc=com nsslapd-backend: TestBitw nsslapd-state: backend objectClass: top objectClass: extensibleObject objectClass: nsMappingTree
Passed tickets/ticket48844_test.py::test_ticket48844_bitwise_on 3.53
No log output captured.
Passed tickets/ticket48844_test.py::test_ticket48844_bitwise_off 4.52
No log output captured.
Passed tickets/ticket48891_test.py::test_ticket48891_setup 2.44
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket48891_test.py:43 Bind as cn=Directory Manager INFO  lib389:ticket48891_test.py:52 ######################### SETUP SUFFIX o=ticket48891.org ###################### INFO  lib389:backend.py:80 List backend with suffix=dc=ticket48891.org INFO  lib389:backend.py:290 Creating a local backend INFO  lib389:backend.py:76 List backend cn=ticket48891,cn=ldbm database,cn=plugins,cn=config INFO  lib389:__init__.py:1713 Found entry dn: cn=ticket48891,cn=ldbm database,cn=plugins,cn=config cn: ticket48891 nsslapd-cachememsize: 512000 nsslapd-cachesize: -1 nsslapd-directory: /var/lib/dirsrv/slapd-standalone1/db/ticket48891 nsslapd-dncachememsize: 16777216 nsslapd-readonly: off nsslapd-require-index: off nsslapd-require-internalop-index: off nsslapd-suffix: dc=ticket48891.org objectClass: top objectClass: extensibleObject objectClass: nsBackendInstance INFO  lib389:mappingTree.py:154 Entry dn: cn="dc=ticket48891.org",cn=mapping tree,cn=config cn: dc=ticket48891.org nsslapd-backend: ticket48891 nsslapd-state: backend objectclass: top objectclass: extensibleObject objectclass: nsMappingTree INFO  lib389:__init__.py:1713 Found entry dn: cn=dc\3Dticket48891.org,cn=mapping tree,cn=config cn: dc=ticket48891.org nsslapd-backend: ticket48891 nsslapd-state: backend objectClass: top objectClass: extensibleObject objectClass: nsMappingTree INFO  lib389:ticket48891_test.py:61 ######################### Generate Test data ###################### INFO  lib389:ticket48891_test.py:77 ######################### SEARCH ALL ###################### INFO  lib389:ticket48891_test.py:78 Bind as cn=Directory Manager and add the READ/SEARCH SELFDN aci INFO  lib389:ticket48891_test.py:82 Returned 10 entries. INFO  lib389:ticket48891_test.py:86 10 person entries are successfully created under dc=ticket48891.org.
Passed tickets/ticket48893_test.py::test_ticket48893 0.00
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket48893_test:ticket48893_test.py:46 Test PASSED
Passed tickets/ticket48906_test.py::test_ticket48906_setup 0.57
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket48906_test.py:63 Bind as cn=Directory Manager INFO  lib389:ticket48906_test.py:83 ######################### SEARCH ALL ###################### INFO  lib389:ticket48906_test.py:84 Bind as cn=Directory Manager and add the READ/SEARCH SELFDN aci INFO  lib389:ticket48906_test.py:88 Returned 10 entries. INFO  lib389:ticket48906_test.py:92 10 person entries are successfully created under dc=example,dc=com.
Passed tickets/ticket48906_test.py::test_ticket48906_dblock_default 0.01
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket48906_test.py:149 ################################### INFO  lib389:ticket48906_test.py:150 ### INFO  lib389:ticket48906_test.py:151 ### Check that before any change config/monitor INFO  lib389:ticket48906_test.py:152 ### contains the default value INFO  lib389:ticket48906_test.py:153 ### INFO  lib389:ticket48906_test.py:154 ###################################
Passed tickets/ticket48906_test.py::test_ticket48906_dblock_ldap_update 3.22
------------------------------Captured stdout call------------------------------
line locks:10000 expected_value 10000 value 10000
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket48906_test.py:160 ################################### INFO  lib389:ticket48906_test.py:161 ### INFO  lib389:ticket48906_test.py:162 ### Check that after ldap update INFO  lib389:ticket48906_test.py:163 ### - monitor contains DEFAULT INFO  lib389:ticket48906_test.py:164 ### - configured contains DBLOCK_LDAP_UPDATE INFO  lib389:ticket48906_test.py:165 ### - After stop dse.ldif contains DBLOCK_LDAP_UPDATE INFO  lib389:ticket48906_test.py:166 ### - After stop guardian contains DEFAULT INFO  lib389:ticket48906_test.py:167 ### In fact guardian should differ from config to recreate the env INFO  lib389:ticket48906_test.py:168 ### Check that after restart (DBenv recreated) INFO  lib389:ticket48906_test.py:169 ### - monitor contains DBLOCK_LDAP_UPDATE INFO  lib389:ticket48906_test.py:170 ### - configured contains DBLOCK_LDAP_UPDATE INFO  lib389:ticket48906_test.py:171 ### - dse.ldif contains DBLOCK_LDAP_UPDATE INFO  lib389:ticket48906_test.py:172 ### INFO  lib389:ticket48906_test.py:173 ###################################
Passed tickets/ticket48906_test.py::test_ticket48906_dblock_edit_update 6.27
------------------------------Captured stdout call------------------------------
line locks:20000 expected_value 20000 value 20000 line locks:40000 expected_value 40000 value 40000
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket48906_test.py:191 ################################### INFO  lib389:ticket48906_test.py:192 ### INFO  lib389:ticket48906_test.py:193 ### Check that after stop INFO  lib389:ticket48906_test.py:194 ### - dse.ldif contains DBLOCK_LDAP_UPDATE INFO  lib389:ticket48906_test.py:195 ### - guardian contains DBLOCK_LDAP_UPDATE INFO  lib389:ticket48906_test.py:196 ### Check that edit dse+restart INFO  lib389:ticket48906_test.py:197 ### - monitor contains DBLOCK_EDIT_UPDATE INFO  lib389:ticket48906_test.py:198 ### - configured contains DBLOCK_EDIT_UPDATE INFO  lib389:ticket48906_test.py:199 ### Check that after stop INFO  lib389:ticket48906_test.py:200 ### - dse.ldif contains DBLOCK_EDIT_UPDATE INFO  lib389:ticket48906_test.py:201 ### - guardian contains DBLOCK_EDIT_UPDATE INFO  lib389:ticket48906_test.py:202 ### INFO  lib389:ticket48906_test.py:203 ###################################
Passed tickets/ticket48906_test.py::test_ticket48906_dblock_robust 4.99
------------------------------Captured stdout call------------------------------
line locks:40000 expected_value 40000 value 40000
-------------------------------Captured log call--------------------------------
INFO  lib389:ticket48906_test.py:245 ################################### INFO  lib389:ticket48906_test.py:246 ### INFO  lib389:ticket48906_test.py:247 ### Check that the following values are rejected INFO  lib389:ticket48906_test.py:248 ### - negative value INFO  lib389:ticket48906_test.py:249 ### - insufficient value INFO  lib389:ticket48906_test.py:250 ### - invalid value INFO  lib389:ticket48906_test.py:251 ### Check that minimum value is accepted INFO  lib389:ticket48906_test.py:252 ### INFO  lib389:ticket48906_test.py:253 ###################################
Passed tickets/ticket48944_test.py::test_ticket48944 101.54
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for consumer1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39201, 'ldap-secureport': 63901, 'server-id': 'consumer1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for consumer2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39202, 'ldap-secureport': 63902, 'server-id': 'consumer2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... 
INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect cae34439-5848-49ba-aa5d-fa5ad562e70a / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 4d3f6873-71c9-42d2-b6bc-4f6fa110e2a6 / got description=cae34439-5848-49ba-aa5d-fa5ad562e70a) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... 
INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists INFO  lib389.topologies:topologies.py:169 Joining consumer consumer1 from master1 ... INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 4f23a6fb-929a-4b63-9e3a-015348f45df0 / got description=4d3f6873-71c9-42d2-b6bc-4f6fa110e2a6) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is working INFO  lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 INFO  lib389.topologies:topologies.py:169 Joining consumer consumer2 from master1 ... 
INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39202 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39202 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39202 is NOT working (expect b7694953-9b23-4228-9fac-276963d68150 / got description=4f23a6fb-929a-4b63-9e3a-015348f45df0) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39202 is working INFO  lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39202 INFO  lib389.topologies:topologies.py:174 Ensuring consumer consumer1 from master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 already exists INFO  lib389.topologies:topologies.py:174 Ensuring consumer consumer2 from master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39202 already exists INFO  lib389.topologies:topologies.py:174 Ensuring consumer consumer1 from master2 ... 
INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 was created INFO  lib389.topologies:topologies.py:174 Ensuring consumer consumer2 from master2 ... INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39202 was created
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket48944_test:ticket48944_test.py:108 Ticket 48944 - On a read only replica invalid state info can accumulate INFO  tests.tickets.ticket48944_test:ticket48944_test.py:52 Enable account policy plugin and configure required attributes INFO  tests.tickets.ticket48944_test:ticket48944_test.py:54 Configure Account policy plugin on master1 INFO  tests.tickets.ticket48944_test:ticket48944_test.py:52 Enable account policy plugin and configure required attributes INFO  tests.tickets.ticket48944_test:ticket48944_test.py:54 Configure Account policy plugin on master2 INFO  tests.tickets.ticket48944_test:ticket48944_test.py:52 Enable account policy plugin and configure required attributes INFO  tests.tickets.ticket48944_test:ticket48944_test.py:69 Configure Account policy plugin on consumer1 INFO  tests.tickets.ticket48944_test:ticket48944_test.py:52 Enable account policy plugin and configure required attributes INFO  tests.tickets.ticket48944_test:ticket48944_test.py:69 Configure Account policy plugin on consumer2 INFO  tests.tickets.ticket48944_test:ticket48944_test.py:115 Sleep for 10secs for the server to come up INFO  tests.tickets.ticket48944_test:ticket48944_test.py:117 Add few entries to server and check if entries are replicated INFO  tests.tickets.ticket48944_test:ticket48944_test.py:132 Checking if entries are synced across masters and consumers INFO  tests.tickets.ticket48944_test:ticket48944_test.py:152 Start master2 to sync lastLoginTime attribute from master1 INFO  tests.tickets.ticket48944_test:ticket48944_test.py:155 Stop master1 INFO  tests.tickets.ticket48944_test:ticket48944_test.py:157 Bind as user1 to master2 and check if lastLoginTime attribute is greater than master1 INFO  tests.tickets.ticket48944_test:ticket48944_test.py:161 Start all servers except master1 INFO  tests.tickets.ticket48944_test:ticket48944_test.py:167 Check if consumers are updated with lastLoginTime attribute value from master2 INFO  
tests.tickets.ticket48944_test:ticket48944_test.py:174 Check if lastLoginTime update in consumers not synced to master2 INFO  tests.tickets.ticket48944_test:ticket48944_test.py:185 Start master1 and check if its updating its older lastLoginTime attribute to consumers INFO  tests.tickets.ticket48944_test:ticket48944_test.py:194 Check if lastLoginTime update from master2 is synced to all masters and consumers INFO  tests.tickets.ticket48944_test:ticket48944_test.py:202 Checking consumer error logs for replica invalid state info
Passed tickets/ticket49008_test.py::test_ticket49008 51.41
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master3 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39003, 'ldap-secureport': 63703, 'server-id': 'master3', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... 
INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 1e088f29-a177-41d7-9058-352ac094369c / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 558a8696-971f-41fc-9e3a-8b6bed469f78 / got description=1e088f29-a177-41d7-9058-352ac094369c) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:156 Joining master master3 to master1 ... 
INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect b86255b9-85dd-44ef-b8d6-e7f3de8dffbc / got description=558a8696-971f-41fc-9e3a-8b6bed469f78) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect f023bcaa-7075-4a8c-b834-6519984ddb9b / got description=b86255b9-85dd-44ef-b8d6-e7f3de8dffbc) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... 
INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master3 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master3 ... INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is was created INFO  lib389.topologies:topologies.py:164 Ensuring master master3 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master3 to master2 ... INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created
-------------------------------Captured log call--------------------------------
INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  tests.tickets.ticket49008_test:ticket49008_test.py:115 ruv before fail: b'{replica 2 ldap://localhost.localdomain:39002} 5f9a42d3000000020000 5f9a42fc000300020000' INFO  tests.tickets.ticket49008_test:ticket49008_test.py:116 ruv after fail: b'{replica 2 ldap://localhost.localdomain:39002} 5f9a42d3000000020000 5f9a42fc000300020000'
Passed tickets/ticket49020_test.py::test_ticket49020 33.82
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master3 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39003, 'ldap-secureport': 63703, 'server-id': 'master3', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... 
INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 8decd573-fc11-4651-800a-94682ba7a920 / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 1385cea6-3472-4d08-ba33-95dfe51e40f8 / got description=8decd573-fc11-4651-800a-94682ba7a920) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:156 Joining master master3 to master1 ... 
INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 42f3d920-d8de-4156-9c5b-519a1e1752ce / got description=1385cea6-3472-4d08-ba33-95dfe51e40f8) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 7ef60604-a133-4de2-9bdc-b93f40ea2f76 / got description=42f3d920-d8de-4156-9c5b-519a1e1752ce) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... 
INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master3 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master3 ... INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is was created INFO  lib389.topologies:topologies.py:164 Ensuring master master3 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master3 to master2 ... INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created
-------------------------------Captured log call--------------------------------
INFO  lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1169 Starting total init cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
Passed tickets/ticket49076_test.py::test_ticket49076 9.46
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed tickets/ticket49095_test.py::test_ticket49095 0.56
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket49095_test:ticket49095_test.py:79 Test Passed
Passed tickets/ticket49104_test.py::test_ticket49104 0.25
------------------------------Captured stderr call------------------------------
valgrind: /usr/bin/dbscan-bin: No such file or directory grep: /tmp/val49104.out: No such file or directory valgrind: /usr/bin/dbscan-bin: No such file or directory grep: /tmp/val49104.out: No such file or directory valgrind: /usr/bin/dbscan-bin: No such file or directory grep: /tmp/val49104.out: No such file or directory valgrind: /usr/bin/dbscan-bin: No such file or directory grep: /tmp/val49104.out: No such file or directory valgrind: /usr/bin/dbscan-bin: No such file or directory grep: /tmp/val49104.out: No such file or directory valgrind: /usr/bin/dbscan-bin: No such file or directory grep: /tmp/val49104.out: No such file or directory valgrind: /usr/bin/dbscan-bin: No such file or directory grep: /tmp/val49104.out: No such file or directory valgrind: /usr/bin/dbscan-bin: No such file or directory grep: /tmp/val49104.out: No such file or directory valgrind: /usr/bin/dbscan-bin: No such file or directory grep: /tmp/val49104.out: No such file or directory valgrind: /usr/bin/dbscan-bin: No such file or directory grep: /tmp/val49104.out: No such file or directory
Passed tickets/ticket49122_test.py::test_ticket49122 13.94
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket49122_test:ticket49122_test.py:57 Testing filter: nsrole=cn=empty,dc=example,dc=com INFO  tests.tickets.ticket49122_test:ticket49122_test.py:57 Testing filter: (nsrole=cn=empty,dc=example,dc=com) INFO  tests.tickets.ticket49122_test:ticket49122_test.py:57 Testing filter: (&(nsrole=cn=empty,dc=example,dc=com)) INFO  tests.tickets.ticket49122_test:ticket49122_test.py:57 Testing filter: (!(nsrole=cn=empty,dc=example,dc=com)) INFO  tests.tickets.ticket49122_test:ticket49122_test.py:57 Testing filter: (&(|(objectclass=person)(sn=app*))(userpassword=*)) INFO  tests.tickets.ticket49122_test:ticket49122_test.py:57 Testing filter: (&(|(objectclass=person)(nsrole=cn=empty,dc=example,dc=com))(userpassword=*)) INFO  tests.tickets.ticket49122_test:ticket49122_test.py:57 Testing filter: (&(|(nsrole=cn=empty,dc=example,dc=com)(sn=app*))(userpassword=*)) INFO  tests.tickets.ticket49122_test:ticket49122_test.py:57 Testing filter: (&(|(objectclass=person)(sn=app*))(nsrole=cn=empty,dc=example,dc=com)) INFO  tests.tickets.ticket49122_test:ticket49122_test.py:57 Testing filter: (&(|(&(cn=*)(objectclass=person)(nsrole=cn=empty,dc=example,dc=com)))(uid=*)) INFO  lib389:ticket49122_test.py:86 Test Passed
Passed tickets/ticket49180_test.py::test_ticket49180 51.40
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master3 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39003, 'ldap-secureport': 63703, 'server-id': 'master3', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master4 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39004, 'ldap-secureport': 63704, 'server-id': 'master4', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... 
INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 15c8a8be-3b59-4d58-9f8f-e2f3da5274dd / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 2b4fe496-d61d-4711-abe5-c91820e8b579 / got description=15c8a8be-3b59-4d58-9f8f-e2f3da5274dd) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:156 Joining master master3 to master1 ... 
INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 8a156b45-0b4e-491d-b40b-8dfb5b61d303 / got description=2b4fe496-d61d-4711-abe5-c91820e8b579) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 3d30149c-58ad-46d5-9aee-30a0858f64a8 / got description=8a156b45-0b4e-491d-b40b-8dfb5b61d303) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 INFO  lib389.topologies:topologies.py:156 Joining master master4 to master1 ... 
INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 20e4f14b-a56e-4d00-913f-8799693722e7 / got description=3d30149c-58ad-46d5-9aee-30a0858f64a8) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 832698de-1c02-4f34-93dd-e5639dfb8cbc / got description=20e4f14b-a56e-4d00-913f-8799693722e7) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... 
INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master3 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master4 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master3 ... INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is was created INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master4 ... INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is was created INFO  lib389.topologies:topologies.py:164 Ensuring master master3 to master1 ... 
INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master3 to master2 ... INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.topologies:topologies.py:164 Ensuring master master3 to master4 ... INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is was created INFO  lib389.topologies:topologies.py:164 Ensuring master master4 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master4 to master2 ... INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.topologies:topologies.py:164 Ensuring master master4 to master3 ... INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is was created
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket49180_test:ticket49180_test.py:78 Running test_ticket49180... INFO  tests.tickets.ticket49180_test:ticket49180_test.py:80 Check that replication works properly on all masters INFO  tests.tickets.ticket49180_test:ticket49180_test.py:95 test_clean: disable master 4... INFO  tests.tickets.ticket49180_test:ticket49180_test.py:30 test_clean: remove all the agreements to master 4... INFO  lib389:agreement.py:1095 Agreement (cn=004,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config) was successfully removed INFO  lib389:agreement.py:1095 Agreement (cn=004,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config) was successfully removed INFO  lib389:agreement.py:1095 Agreement (cn=004,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config) was successfully removed INFO  tests.tickets.ticket49180_test:ticket49180_test.py:46 Restoring master 4... INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 78e54219-87be-4afb-8812-2b725b501af7 / got description=832698de-1c02-4f34-93dd-e5639dfb8cbc) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 is working INFO  lib389.replica:replica.py:2498 Retry: 
Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 78e66d53-6d79-4537-9ab9-704a42b35dc2 / got description=78e54219-87be-4afb-8812-2b725b501af7) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39004 already exists INFO  tests.tickets.ticket49180_test:ticket49180_test.py:59 Replication is working m1 -> m2. INFO  tests.tickets.ticket49180_test:ticket49180_test.py:59 Replication is working m1 -> m3. INFO  tests.tickets.ticket49180_test:ticket49180_test.py:59 Replication is working m1 -> m4. INFO  tests.tickets.ticket49180_test:ticket49180_test.py:67 Replication is working m4 -> m1. INFO  tests.tickets.ticket49180_test:ticket49180_test.py:73 Master 4 has been successfully restored. INFO  tests.tickets.ticket49180_test:ticket49180_test.py:106 Errors found on m1: 0 INFO  tests.tickets.ticket49180_test:ticket49180_test.py:111 Errors found on m2: 0 INFO  tests.tickets.ticket49180_test:ticket49180_test.py:116 Errors found on m3: 0
Passed tickets/ticket49184_test.py::test_ticket49184 4.70
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket49184_test:ticket49184_test.py:89 create users and group... INFO  tests.tickets.ticket49184_test:ticket49184_test.py:39 Adding members to the group... INFO  tests.tickets.ticket49184_test:ticket49184_test.py:39 Adding members to the group...
Passed tickets/ticket49227_test.py::test_ticket49227 25.88
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed tickets/ticket49249_test.py::test_ticket49249 0.83
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed tickets/ticket49273_test.py::test_49273_corrupt_dbversion 3.47
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed tickets/ticket49290_test.py::test_49290_range_unindexed_notes 4.16
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
------------------------------Captured stderr call------------------------------
[29/Oct/2020:00:29:10.961233008 -0400] - INFO - ldbm_instance_config_cachememsize_set - force a minimal value 512000 [29/Oct/2020:00:29:10.969328354 -0400] - INFO - bdb_instance_start - Import is running with nsslapd-db-private-import-mem on; No other process is allowed to access the database [29/Oct/2020:00:29:10.973020272 -0400] - INFO - check_and_set_import_cache - pagesize: 4096, available bytes 7466754048, process usage 22745088 [29/Oct/2020:00:29:10.976088558 -0400] - INFO - check_and_set_import_cache - Import allocates 2916700KB import cache. [29/Oct/2020:00:29:11.263166499 -0400] - INFO - bdb_db2index - userRoot: Indexing attribute: modifytimestamp [29/Oct/2020:00:29:11.267122895 -0400] - ERR - libdb - BDB1566 txn_checkpoint interface requires an environment configured for the transaction subsystem [29/Oct/2020:00:29:11.270280035 -0400] - ERR - bdb_force_checkpoint - Checkpoint FAILED, error Invalid argument (22) [29/Oct/2020:00:29:11.281996783 -0400] - INFO - bdb_db2index - userRoot: Finished indexing. [29/Oct/2020:00:29:11.311323370 -0400] - INFO - bdb_pre_close - All database threads now stopped
Passed tickets/ticket49386_test.py::test_ticket49386 32.79
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
CRITICAL tests.tickets.ticket49386_test:ticket49386_test.py:28 Adding user (cn=user_0,ou=people,dc=example,dc=com): CRITICAL tests.tickets.ticket49386_test:ticket49386_test.py:28 Adding user (cn=user_1,ou=people,dc=example,dc=com): CRITICAL tests.tickets.ticket49386_test:ticket49386_test.py:28 Adding user (cn=user_2,ou=people,dc=example,dc=com): CRITICAL tests.tickets.ticket49386_test:ticket49386_test.py:28 Adding user (cn=user_3,ou=people,dc=example,dc=com): CRITICAL tests.tickets.ticket49386_test:ticket49386_test.py:28 Adding user (cn=user_4,ou=people,dc=example,dc=com): CRITICAL tests.tickets.ticket49386_test:ticket49386_test.py:28 Adding user (cn=user_5,ou=people,dc=example,dc=com): CRITICAL tests.tickets.ticket49386_test:ticket49386_test.py:28 Adding user (cn=user_6,ou=people,dc=example,dc=com): CRITICAL tests.tickets.ticket49386_test:ticket49386_test.py:28 Adding user (cn=user_7,ou=people,dc=example,dc=com): CRITICAL tests.tickets.ticket49386_test:ticket49386_test.py:28 Adding user (cn=user_8,ou=people,dc=example,dc=com): CRITICAL tests.tickets.ticket49386_test:ticket49386_test.py:28 Adding user (cn=user_9,ou=people,dc=example,dc=com): INFO  lib389:ticket49386_test.py:65 !!!!!!! cn=user_1,ou=people,dc=example,dc=com: memberof->b'cn=group_1,ou=groups,dc=example,dc=com' INFO  lib389:ticket49386_test.py:66 !!!!!!! b'cn=group_1,ou=groups,dc=example,dc=com' INFO  lib389:ticket49386_test.py:67 !!!!!!! cn=group_1,ou=groups,dc=example,dc=com INFO  tests.tickets.ticket49386_test:ticket49386_test.py:130 memberof log found: [29/Oct/2020:00:30:22.201135859 -0400] - DEBUG - memberof-plugin - memberof_postop_modrdn: Skip modrdn operation because src/dst identical cn=group_1,ou=groups,dc=example,dc=com
Passed tickets/ticket49441_test.py::test_ticket49441 8.34
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket49441_test:ticket49441_test.py:35 Position ldif files, and add indexes... INFO  tests.tickets.ticket49441_test:ticket49441_test.py:52 Import LDIF with large indexed binary attributes... ERROR  lib389:tasks.py:495 Error: import task import_10292020_003106 for file /var/lib/dirsrv/slapd-standalone1/ldifbinary.ldif exited with -23 INFO  tests.tickets.ticket49441_test:ticket49441_test.py:61 Verify server is still running... INFO  tests.tickets.ticket49441_test:ticket49441_test.py:68 Test PASSED
Passed tickets/ticket49460_test.py::test_ticket_49460 13.27
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master3 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39003, 'ldap-secureport': 63703, 'server-id': 'master3', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... 
INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect e6fe582b-114d-4254-b8a8-fd47ee7f34e7 / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect db28f26c-5158-471c-8096-40aa2933e043 / got description=e6fe582b-114d-4254-b8a8-fd47ee7f34e7) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:156 Joining master master3 to master1 ... 
INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 1aa2745d-6750-4b2e-8a66-4685ddcc8996 / got description=db28f26c-5158-471c-8096-40aa2933e043) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 930b614b-f8c0-4df7-8837-e2ee2f13257d / got description=1aa2745d-6750-4b2e-8a66-4685ddcc8996) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 930b614b-f8c0-4df7-8837-e2ee2f13257d / got description=1aa2745d-6750-4b2e-8a66-4685ddcc8996) INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 930b614b-f8c0-4df7-8837-e2ee2f13257d / got description=1aa2745d-6750-4b2e-8a66-4685ddcc8996) INFO  
lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master3 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master3 ... INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is was created INFO  lib389.topologies:topologies.py:164 Ensuring master master3 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master3 to master2 ... 
INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created
-------------------------------Captured log call--------------------------------
CRITICAL tests.tickets.ticket49460_test:ticket49460_test.py:26 Adding user (cn=user11,ou=people,dc=example,dc=com): CRITICAL tests.tickets.ticket49460_test:ticket49460_test.py:26 Adding user (cn=user21,ou=people,dc=example,dc=com): CRITICAL tests.tickets.ticket49460_test:ticket49460_test.py:26 Adding user (cn=user31,ou=people,dc=example,dc=com):
Passed tickets/ticket49471_test.py::test_ticket49471 2.04
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
CRITICAL tests.tickets.ticket49471_test:ticket49471_test.py:28 Adding user (cn=user_1,ou=people,dc=example,dc=com):
Passed tickets/ticket49540_test.py::test_ticket49540 19.06
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket49540_test:ticket49540_test.py:28 index homeDirectory INFO  lib389:tasks.py:798 Index task index_attrs_10292020_003734 completed successfully INFO  tests.tickets.ticket49540_test:ticket49540_test.py:62 check_task_status =========> 0 th loop INFO  tests.tickets.ticket49540_test:ticket49540_test.py:71 cn=index_attrs_10292020_003734,cn=index,cn=tasks,cn=config ---> NO STATUS INFO  tests.tickets.ticket49540_test:ticket49540_test.py:83 =========> Great it was expected in the middle of index INFO  tests.tickets.ticket49540_test:ticket49540_test.py:62 check_task_status =========> 1 th loop INFO  tests.tickets.ticket49540_test:ticket49540_test.py:71 cn=index_attrs_10292020_003734,cn=index,cn=tasks,cn=config ---> NO STATUS INFO  tests.tickets.ticket49540_test:ticket49540_test.py:83 =========> Great it was expected in the middle of index INFO  tests.tickets.ticket49540_test:ticket49540_test.py:62 check_task_status =========> 2 th loop INFO  tests.tickets.ticket49540_test:ticket49540_test.py:71 cn=index_attrs_10292020_003734,cn=index,cn=tasks,cn=config ---> NO STATUS INFO  tests.tickets.ticket49540_test:ticket49540_test.py:83 =========> Great it was expected in the middle of index INFO  tests.tickets.ticket49540_test:ticket49540_test.py:62 check_task_status =========> 3 th loop INFO  tests.tickets.ticket49540_test:ticket49540_test.py:71 cn=index_attrs_10292020_003734,cn=index,cn=tasks,cn=config ---> NO STATUS INFO  tests.tickets.ticket49540_test:ticket49540_test.py:83 =========> Great it was expected in the middle of index INFO  tests.tickets.ticket49540_test:ticket49540_test.py:62 check_task_status =========> 4 th loop INFO  tests.tickets.ticket49540_test:ticket49540_test.py:71 cn=index_attrs_10292020_003734,cn=index,cn=tasks,cn=config ---> NO STATUS INFO  tests.tickets.ticket49540_test:ticket49540_test.py:83 =========> Great it was expected in the middle of index INFO  tests.tickets.ticket49540_test:ticket49540_test.py:62 check_task_status 
=========> 5 th loop INFO  tests.tickets.ticket49540_test:ticket49540_test.py:71 cn=index_attrs_10292020_003734,cn=index,cn=tasks,cn=config ---> NO STATUS INFO  tests.tickets.ticket49540_test:ticket49540_test.py:83 =========> Great it was expected in the middle of index INFO  tests.tickets.ticket49540_test:ticket49540_test.py:62 check_task_status =========> 6 th loop INFO  tests.tickets.ticket49540_test:ticket49540_test.py:68 cn=index_attrs_10292020_003734,cn=index,cn=tasks,cn=config ---> b'userRoot: Finished indexing.' INFO  tests.tickets.ticket49540_test:ticket49540_test.py:62 check_task_status =========> 7 th loop INFO  tests.tickets.ticket49540_test:ticket49540_test.py:68 cn=index_attrs_10292020_003734,cn=index,cn=tasks,cn=config ---> b'userRoot: Finished indexing.' INFO  tests.tickets.ticket49540_test:ticket49540_test.py:62 check_task_status =========> 8 th loop INFO  tests.tickets.ticket49540_test:ticket49540_test.py:68 cn=index_attrs_10292020_003734,cn=index,cn=tasks,cn=config ---> b'userRoot: Finished indexing.' INFO  tests.tickets.ticket49540_test:ticket49540_test.py:62 check_task_status =========> 9 th loop INFO  tests.tickets.ticket49540_test:ticket49540_test.py:68 cn=index_attrs_10292020_003734,cn=index,cn=tasks,cn=config ---> b'userRoot: Finished indexing.'
Passed tickets/ticket49623_2_test.py::test_modrdn_loop 0.32
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology.
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket49623_2_test:ticket49623_2_test.py:64 Check the log messages for cenotaph error
Passed tickets/ticket49658_test.py::test_ticket49658_init 5.97
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master2 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'master2', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master3 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39003, 'ldap-secureport': 63703, 'server-id': 'master3', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:142 Creating replication topology. INFO  lib389.topologies:topologies.py:156 Joining master master2 to master1 ... 
INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 9b964e5a-fc6d-4311-8ed2-4ada2461928d / got description=None) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect f2104e80-4174-426b-b3f8-a680fa8aae55 / got description=9b964e5a-fc6d-4311-8ed2-4ada2461928d) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 INFO  lib389.topologies:topologies.py:156 Joining master master3 to master1 ... 
INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is was created INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is was created INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 01ceeec0-cc8b-41b3-b1c8-d5002d568c80 / got description=f2104e80-4174-426b-b3f8-a680fa8aae55) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is working INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect d4ed1684-a3e6-460d-ad7c-bda0e88c6cfe / got description=01ceeec0-cc8b-41b3-b1c8-d5002d568c80) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 is working INFO  lib389.replica:replica.py:2153 SUCCESS: joined master from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master2 ... 
INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master1 to master3 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master2 to master3 ... INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 is was created INFO  lib389.topologies:topologies.py:164 Ensuring master master3 to master1 ... INFO  lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 already exists INFO  lib389.topologies:topologies.py:164 Ensuring master master3 to master2 ... INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39002 is was created
-------------------------------Captured log call--------------------------------
CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=0,ou=distinguished,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=0,ou=distinguished,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=1,ou=distinguished,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=1,ou=distinguished,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=2,ou=distinguished,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=2,ou=distinguished,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=3,ou=distinguished,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=3,ou=distinguished,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=4,ou=distinguished,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=4,ou=distinguished,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=5,ou=distinguished,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=5,ou=distinguished,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=6,ou=distinguished,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=6,ou=distinguished,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:54 
Adding user (employeeNumber=7,ou=distinguished,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=7,ou=distinguished,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=8,ou=distinguished,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=8,ou=distinguished,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=9,ou=distinguished,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=9,ou=distinguished,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=10,ou=distinguished,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=10,ou=distinguished,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=11,ou=distinguished,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=11,ou=distinguished,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=12,ou=distinguished,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=12,ou=distinguished,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=13,ou=distinguished,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=13,ou=distinguished,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user 
(employeeNumber=14,ou=distinguished,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=14,ou=distinguished,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=15,ou=distinguished,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=15,ou=distinguished,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=16,ou=distinguished,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=16,ou=distinguished,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=17,ou=distinguished,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=17,ou=distinguished,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=18,ou=distinguished,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=18,ou=distinguished,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=19,ou=distinguished,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=19,ou=distinguished,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_0,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_0,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_1,ou=regular,ou=people,dc=example,dc=com): INFO  
tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_1,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_2,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_2,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_3,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_3,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_4,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_4,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_5,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_5,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_6,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_6,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_7,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_7,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_8,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_8,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user 
(uid=user_9,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_9,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_10,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_10,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_11,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_11,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_12,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_12,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_13,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_13,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_14,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_14,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_15,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_15,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_16,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_16,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL 
tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_17,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_17,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_18,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_18,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_19,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_19,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_20,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_20,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_21,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_21,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_22,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_22,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_23,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_23,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_24,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding 
uid=user_24,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_25,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_25,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_26,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_26,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_27,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_27,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_28,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_28,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_29,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_29,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_30,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_30,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_31,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_31,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_32,ou=regular,ou=people,dc=example,dc=com): INFO  
tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_32,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_33,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_33,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_34,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_34,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_35,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_35,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_36,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_36,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_37,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_37,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_38,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_38,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_39,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_39,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user 
(uid=user_40,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_40,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_41,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_41,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_42,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_42,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_43,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_43,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_44,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_44,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_45,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_45,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_46,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_46,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_47,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_47,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL 
tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_48,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_48,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_49,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_49,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_50,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_50,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_51,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_51,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_52,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_52,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_53,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_53,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_54,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_54,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_55,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding 
uid=user_55,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_56,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_56,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_57,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_57,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_58,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_58,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_59,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_59,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_60,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_60,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_61,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_61,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_62,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_62,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_63,ou=regular,ou=people,dc=example,dc=com): INFO  
tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_63,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_64,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_64,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_65,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_65,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_66,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_66,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_67,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_67,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_68,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_68,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_69,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_69,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_70,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_70,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user 
(uid=user_71,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_71,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_72,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_72,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_73,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_73,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_74,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_74,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_75,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_75,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_76,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_76,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_77,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_77,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_78,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_78,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL 
tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_79,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_79,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_80,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_80,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_81,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_81,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_82,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_82,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_83,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_83,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_84,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_84,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_85,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_85,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_86,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding 
uid=user_86,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_87,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_87,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_88,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_88,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_89,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_89,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_90,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_90,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_91,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_91,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_92,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_92,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_93,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_93,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_94,ou=regular,ou=people,dc=example,dc=com): INFO  
tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_94,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_95,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_95,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_96,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_96,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_97,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_97,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_98,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_98,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_99,ou=regular,ou=people,dc=example,dc=com): INFO  tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_99,ou=regular,ou=people,dc=example,dc=com on M3
Passed tickets/ticket49658_test.py::test_ticket49658_0 33.46
-------------------------------Captured log call--------------------------------
INFO  lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  tests.tickets.ticket49658_test:ticket49658_test.py:289 Search M1 employeeNumber=b'0' (vs. b'0') INFO  tests.tickets.ticket49658_test:ticket49658_test.py:295 Search M2 employeeNumber=b'0' (vs. b'0') INFO  tests.tickets.ticket49658_test:ticket49658_test.py:303 Search M3 employeeNumber=b'0' (vs. b'0')
Passed tickets/ticket49658_test.py::test_ticket49658_1 33.55
-------------------------------Captured log call--------------------------------
INFO  lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  tests.tickets.ticket49658_test:ticket49658_test.py:395 Search M1 employeeNumber=b'1' (vs. b'1') INFO  tests.tickets.ticket49658_test:ticket49658_test.py:401 Search M2 employeeNumber=b'1' (vs. b'1') INFO  tests.tickets.ticket49658_test:ticket49658_test.py:409 Search M3 employeeNumber=b'1' (vs. b'1')
Passed tickets/ticket49658_test.py::test_ticket49658_2 33.46
-------------------------------Captured log call--------------------------------
INFO  lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  tests.tickets.ticket49658_test:ticket49658_test.py:501 Search M1 employeeNumber=b'2' (vs. b'2') INFO  tests.tickets.ticket49658_test:ticket49658_test.py:507 Search M2 employeeNumber=b'2' (vs. b'2') INFO  tests.tickets.ticket49658_test:ticket49658_test.py:515 Search M3 employeeNumber=b'2' (vs. b'2')
Passed tickets/ticket49658_test.py::test_ticket49658_3 33.57
-------------------------------Captured log call--------------------------------
INFO  lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  tests.tickets.ticket49658_test:ticket49658_test.py:607 Search M1 employeeNumber=b'3' (vs. b'3') INFO  tests.tickets.ticket49658_test:ticket49658_test.py:613 Search M2 employeeNumber=b'3' (vs. b'3') INFO  tests.tickets.ticket49658_test:ticket49658_test.py:621 Search M3 employeeNumber=b'3' (vs. b'3')
Passed tickets/ticket49658_test.py::test_ticket49658_4 33.56
-------------------------------Captured log call--------------------------------
INFO  lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  tests.tickets.ticket49658_test:ticket49658_test.py:713 Search M1 employeeNumber=b'4' (vs. b'4') INFO  tests.tickets.ticket49658_test:ticket49658_test.py:719 Search M2 employeeNumber=b'4' (vs. b'4') INFO  tests.tickets.ticket49658_test:ticket49658_test.py:727 Search M3 employeeNumber=b'4' (vs. b'4')
Passed tickets/ticket49658_test.py::test_ticket49658_5 33.57
-------------------------------Captured log call--------------------------------
INFO  lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  tests.tickets.ticket49658_test:ticket49658_test.py:819 Search M1 employeeNumber=b'5' (vs. b'5') INFO  tests.tickets.ticket49658_test:ticket49658_test.py:825 Search M2 employeeNumber=b'5' (vs. b'5') INFO  tests.tickets.ticket49658_test:ticket49658_test.py:833 Search M3 employeeNumber=b'5' (vs. b'5')
Passed tickets/ticket49658_test.py::test_ticket49658_6 33.57
-------------------------------Captured log call--------------------------------
INFO  lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  tests.tickets.ticket49658_test:ticket49658_test.py:934 Search M1 employeeNumber=b'6.2' (vs. 6.2) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:940 Search M2 employeeNumber=b'6.2' (vs. 6.2) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:948 Search M3 employeeNumber=b'6.2' (vs. 6.2)
Passed tickets/ticket49658_test.py::test_ticket49658_7 33.57
-------------------------------Captured log call--------------------------------
INFO  lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  tests.tickets.ticket49658_test:ticket49658_test.py:1042 Search M1 employeeNumber=b'7.2' (vs. 7.2) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:1048 Search M2 employeeNumber=b'7.2' (vs. 7.2) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:1056 Search M3 employeeNumber=b'7.2' (vs. 7.2)
Passed tickets/ticket49658_test.py::test_ticket49658_8 33.55
-------------------------------Captured log call--------------------------------
INFO  lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  tests.tickets.ticket49658_test:ticket49658_test.py:1150 Search M1 employeeNumber=b'8.2' (vs. 8.2) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:1156 Search M2 employeeNumber=b'8.2' (vs. 8.2) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:1164 Search M3 employeeNumber=b'8.2' (vs. 8.2)
Passed tickets/ticket49658_test.py::test_ticket49658_9 33.58
-------------------------------Captured log call--------------------------------
INFO  lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  tests.tickets.ticket49658_test:ticket49658_test.py:1259 Search M1 employeeNumber=b'9.2' (vs. 9.2) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:1265 Search M2 employeeNumber=b'9.2' (vs. 9.2) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:1273 Search M3 employeeNumber=b'9.2' (vs. 9.2)
Passed tickets/ticket49658_test.py::test_ticket49658_10 33.57
-------------------------------Captured log call--------------------------------
INFO  lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  tests.tickets.ticket49658_test:ticket49658_test.py:1369 Search M1 employeeNumber=b'10.2' (vs. 10.2) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:1375 Search M2 employeeNumber=b'10.2' (vs. 10.2) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:1383 Search M3 employeeNumber=b'10.2' (vs. 10.2)
Passed tickets/ticket49658_test.py::test_ticket49658_11 33.60
-------------------------------Captured log call--------------------------------
INFO  lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  tests.tickets.ticket49658_test:ticket49658_test.py:1479 Search M1 employeeNumber=b'11.1' (vs. 11.1) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:1485 Search M2 employeeNumber=b'11.1' (vs. 11.1) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:1493 Search M3 employeeNumber=b'11.1' (vs. 11.1)
Passed tickets/ticket49658_test.py::test_ticket49658_12 33.56
-------------------------------Captured log call--------------------------------
INFO  lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  tests.tickets.ticket49658_test:ticket49658_test.py:1594 Search M1 employeeNumber=b'12.1' (vs. 12.1) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:1600 Search M2 employeeNumber=b'12.1' (vs. 12.1) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:1608 Search M3 employeeNumber=b'12.1' (vs. 12.1)
Passed tickets/ticket49658_test.py::test_ticket49658_13 33.56
-------------------------------Captured log call--------------------------------
INFO  lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  tests.tickets.ticket49658_test:ticket49658_test.py:1709 Search M1 employeeNumber=b'13.1' (vs. 13.1) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:1715 Search M2 employeeNumber=b'13.1' (vs. 13.1) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:1723 Search M3 employeeNumber=b'13.1' (vs. 13.1)
Passed tickets/ticket49658_test.py::test_ticket49658_14 33.56
-------------------------------Captured log call--------------------------------
INFO  lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  tests.tickets.ticket49658_test:ticket49658_test.py:1825 Search M1 employeeNumber=b'14.1' (vs. 14.1) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:1831 Search M2 employeeNumber=b'14.1' (vs. 14.1) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:1839 Search M3 employeeNumber=b'14.1' (vs. 14.1)
Passed tickets/ticket49658_test.py::test_ticket49658_15 33.47
-------------------------------Captured log call--------------------------------
INFO  lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  tests.tickets.ticket49658_test:ticket49658_test.py:1940 Search M1 employeeNumber=b'15.1' (vs. 15.1) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:1946 Search M2 employeeNumber=b'15.1' (vs. 15.1) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:1954 Search M3 employeeNumber=b'15.1' (vs. 15.1)
Passed tickets/ticket49658_test.py::test_ticket49658_16 38.13
-------------------------------Captured log call--------------------------------
INFO  lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  tests.tickets.ticket49658_test:ticket49658_test.py:2076 Search M1 employeeNumber=b'1.1' (vs. 1.1) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:2082 Search M2 employeeNumber=b'1.1' (vs. 1.1) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:2090 Search M3 employeeNumber=b'1.1' (vs. 1.1)
Passed tickets/ticket49658_test.py::test_ticket49658_17 37.49
-------------------------------Captured log call--------------------------------
INFO  lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  tests.tickets.ticket49658_test:ticket49658_test.py:2183 Search M1 employeeNumber=b'2.2' (vs. 2.2) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:2189 Search M2 employeeNumber=b'2.2' (vs. 2.2) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:2197 Search M3 employeeNumber=b'2.2' (vs. 2.2)
Passed tickets/ticket49658_test.py::test_ticket49658_18 37.61
-------------------------------Captured log call--------------------------------
INFO  lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  tests.tickets.ticket49658_test:ticket49658_test.py:2289 Search M1 employeeNumber=b'3.2' (vs. 3.2) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:2295 Search M2 employeeNumber=b'3.2' (vs. 3.2) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:2303 Search M3 employeeNumber=b'3.2' (vs. 3.2)
Passed tickets/ticket49658_test.py::test_ticket49658_19 38.63
-------------------------------Captured log call--------------------------------
INFO  lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  tests.tickets.ticket49658_test:ticket49658_test.py:2405 Search M1 employeeNumber=b'4.1' (vs. 4.1) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:2411 Search M2 employeeNumber=b'4.1' (vs. 4.1) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:2419 Search M3 employeeNumber=b'4.1' (vs. 4.1)
Passed tickets/ticket49658_test.py::test_ticket49658_20 38.61
-------------------------------Captured log call--------------------------------
INFO  lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  tests.tickets.ticket49658_test:ticket49658_test.py:2520 Search M1 employeeNumber=b'5.1' (vs. 5.1) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:2526 Search M2 employeeNumber=b'5.1' (vs. 5.1) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:2534 Search M3 employeeNumber=b'5.1' (vs. 5.1)
Passed tickets/ticket49658_test.py::test_ticket49658_21 38.61
-------------------------------Captured log call--------------------------------
INFO  lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  tests.tickets.ticket49658_test:ticket49658_test.py:2635 Search M1 employeeNumber=b'6.1' (vs. 6.1) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:2641 Search M2 employeeNumber=b'6.1' (vs. 6.1) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:2649 Search M3 employeeNumber=b'6.1' (vs. 6.1)
Passed tickets/ticket49658_test.py::test_ticket49658_22 38.60
-------------------------------Captured log call--------------------------------
INFO  lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  tests.tickets.ticket49658_test:ticket49658_test.py:2750 Search M1 employeeNumber=b'7.1' (vs. 7.1) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:2756 Search M2 employeeNumber=b'7.1' (vs. 7.1) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:2764 Search M3 employeeNumber=b'7.1' (vs. 7.1)
Passed tickets/ticket49658_test.py::test_ticket49658_23 39.62
-------------------------------Captured log call--------------------------------
INFO  lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  tests.tickets.ticket49658_test:ticket49658_test.py:2873 Search M1 employeeNumber=b'8.2' (vs. 8.2) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:2879 Search M2 employeeNumber=b'8.2' (vs. 8.2) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:2887 Search M3 employeeNumber=b'8.2' (vs. 8.2)
Passed tickets/ticket49658_test.py::test_ticket49658_24 40.01
-------------------------------Captured log call--------------------------------
INFO  lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  tests.tickets.ticket49658_test:ticket49658_test.py:2996 Search M1 employeeNumber=b'9.2' (vs. 9.2) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:3002 Search M2 employeeNumber=b'9.2' (vs. 9.2) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:3010 Search M3 employeeNumber=b'9.2' (vs. 9.2)
Passed tickets/ticket49658_test.py::test_ticket49658_25 39.62
-------------------------------Captured log call--------------------------------
INFO  lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  tests.tickets.ticket49658_test:ticket49658_test.py:3119 Search M1 employeeNumber=b'10.2' (vs. 10.2) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:3125 Search M2 employeeNumber=b'10.2' (vs. 10.2) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:3133 Search M3 employeeNumber=b'10.2' (vs. 10.2)
Passed tickets/ticket49658_test.py::test_ticket49658_26 39.53
-------------------------------Captured log call--------------------------------
INFO  lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  tests.tickets.ticket49658_test:ticket49658_test.py:3242 Search M1 employeeNumber=b'11.2' (vs. 11.2) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:3248 Search M2 employeeNumber=b'11.2' (vs. 11.2) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:3256 Search M3 employeeNumber=b'11.2' (vs. 11.2)
Passed tickets/ticket49658_test.py::test_ticket49658_27 39.63
-------------------------------Captured log call--------------------------------
INFO  lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  tests.tickets.ticket49658_test:ticket49658_test.py:3365 Search M1 employeeNumber=b'12.2' (vs. 12.2) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:3371 Search M2 employeeNumber=b'12.2' (vs. 12.2) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:3379 Search M3 employeeNumber=b'12.2' (vs. 12.2)
Passed tickets/ticket49658_test.py::test_ticket49658_28 39.62
-------------------------------Captured log call--------------------------------
INFO  lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  tests.tickets.ticket49658_test:ticket49658_test.py:3488 Search M1 employeeNumber=b'13.2' (vs. 13.2) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:3494 Search M2 employeeNumber=b'13.2' (vs. 13.2) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:3502 Search M3 employeeNumber=b'13.2' (vs. 13.2)
Passed tickets/ticket49658_test.py::test_ticket49658_29 39.90
-------------------------------Captured log call--------------------------------
INFO  lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  tests.tickets.ticket49658_test:ticket49658_test.py:3612 Search M1 employeeNumber=b'14.2' (vs. 14.2) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:3618 Search M2 employeeNumber=b'14.2' (vs. 14.2) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:3626 Search M3 employeeNumber=b'14.2' (vs. 14.2)
Passed tickets/ticket49658_test.py::test_ticket49658_30 39.76
-------------------------------Captured log call--------------------------------
INFO  lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  tests.tickets.ticket49658_test:ticket49658_test.py:3735 Search M1 employeeNumber=b'15.2' (vs. 15.2) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:3741 Search M2 employeeNumber=b'15.2' (vs. 15.2) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:3749 Search M3 employeeNumber=b'15.2' (vs. 15.2)
Passed tickets/ticket49658_test.py::test_ticket49658_31 40.63
-------------------------------Captured log call--------------------------------
INFO  lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  tests.tickets.ticket49658_test:ticket49658_test.py:3866 Search M1 employeeNumber=b'16.1' (vs. 16.1) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:3872 Search M2 employeeNumber=b'16.1' (vs. 16.1) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:3880 Search M3 employeeNumber=b'16.1' (vs. 16.1)
Passed tickets/ticket49658_test.py::test_ticket49658_32 40.64
-------------------------------Captured log call--------------------------------
INFO  lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  tests.tickets.ticket49658_test:ticket49658_test.py:3998 Search M1 employeeNumber=b'17.1' (vs. 17.1) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:4004 Search M2 employeeNumber=b'17.1' (vs. 17.1) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:4012 Search M3 employeeNumber=b'17.1' (vs. 17.1)
Passed tickets/ticket49658_test.py::test_ticket49658_33 40.64
-------------------------------Captured log call--------------------------------
INFO  lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  tests.tickets.ticket49658_test:ticket49658_test.py:4122 Search M1 employeeNumber=b'18.1' (vs. 18.1) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:4128 Search M2 employeeNumber=b'18.1' (vs. 18.1) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:4136 Search M3 employeeNumber=b'18.1' (vs. 18.1)
Passed tickets/ticket49658_test.py::test_ticket49658_34 40.62
-------------------------------Captured log call--------------------------------
INFO  lib389:agreement.py:1194 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1194 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  lib389:agreement.py:1220 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO  tests.tickets.ticket49658_test:ticket49658_test.py:4246 Search M1 employeeNumber=b'19.1' (vs. 19.1) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:4252 Search M2 employeeNumber=b'19.1' (vs. 19.1) INFO  tests.tickets.ticket49658_test:ticket49658_test.py:4260 Search M3 employeeNumber=b'19.1' (vs. 19.1)
Passed tickets/ticket49788_test.py::test_ticket49781 0.83
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed tickets/ticket50078_test.py::test_ticket50078 4.36
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for master1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'master1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for hub1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39101, 'ldap-secureport': 63801, 'server-id': 'hub1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for consumer1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39201, 'ldap-secureport': 63901, 'server-id': 'consumer1', 'suffix': 'dc=example,dc=com'} was created. INFO  lib389.topologies:topologies.py:524 Creating replication topology. INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39101 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39101 was created INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39101 is working INFO  lib389.replica:replica.py:2211 SUCCESS: joined consumer from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39101 INFO  lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 completed INFO  lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39101 to 
ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 was created INFO  lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39101 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 INFO  lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 0578c41f-f1c0-4e5c-8324-136ebd97154d / got description=4f9afdca-4947-4b04-a349-918a742e4d43) INFO  lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-136-251.hosted.upshift.rdu2.redhat.com:39201 is working
-------------------------------Captured log call--------------------------------
INFO  tests.tickets.ticket50078_test:ticket50078_test.py:31 Replication is working. INFO  tests.tickets.ticket50078_test:ticket50078_test.py:47 Rename the test entry test_user1... INFO  tests.tickets.ticket50078_test:ticket50078_test.py:52 Replication is working.
Passed tickets/ticket50234_test.py::test_ticket50234 0.86
-------------------------------Captured log setup-------------------------------
INFO  lib389.SetupDs:setup.py:658 Starting installation... INFO  lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO  lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.