· 6 years ago · Mar 10, 2020, 04:02 PM
1$ cat templates/docker-registry.yaml
2# Generated with the following on 2020-02-12T13:16:46.168473
3#
4# openstack tripleo container image prepare -e /home/stack/templates/local_images-16.yaml --output-env-file /home/stack/templates/docker-registry.yaml
5#
6
7parameter_defaults:
8 AlertManagerContainerImage: 172.16.0.1:8787/openshift4/ose-prometheus-alertmanager:4.1
9 ContainerAodhApiImage: 172.16.0.1:8787/rhosp-rhel8/openstack-aodh-api:16.0
10 ContainerAodhConfigImage: 172.16.0.1:8787/rhosp-rhel8/openstack-aodh-api:16.0
11 ContainerAodhEvaluatorImage: 172.16.0.1:8787/rhosp-rhel8/openstack-aodh-evaluator:16.0
12 ContainerAodhListenerImage: 172.16.0.1:8787/rhosp-rhel8/openstack-aodh-listener:16.0
13 ContainerAodhNotifierImage: 172.16.0.1:8787/rhosp-rhel8/openstack-aodh-notifier:16.0
14 ContainerBarbicanApiImage: 172.16.0.1:8787/rhosp-rhel8/openstack-barbican-api:16.0
15 ContainerBarbicanConfigImage: 172.16.0.1:8787/rhosp-rhel8/openstack-barbican-api:16.0
16 ContainerBarbicanKeystoneListenerConfigImage: 172.16.0.1:8787/rhosp-rhel8/openstack-barbican-keystone-listener:16.0
17 ContainerBarbicanKeystoneListenerImage: 172.16.0.1:8787/rhosp-rhel8/openstack-barbican-keystone-listener:16.0
18 ContainerBarbicanWorkerConfigImage: 172.16.0.1:8787/rhosp-rhel8/openstack-barbican-worker:16.0
19 ContainerBarbicanWorkerImage: 172.16.0.1:8787/rhosp-rhel8/openstack-barbican-worker:16.0
20 ContainerCeilometerCentralImage: 172.16.0.1:8787/rhosp-rhel8/openstack-ceilometer-central:16.0
21 ContainerCeilometerComputeImage: 172.16.0.1:8787/rhosp-rhel8/openstack-ceilometer-compute:16.0
22 ContainerCeilometerConfigImage: 172.16.0.1:8787/rhosp-rhel8/openstack-ceilometer-central:16.0
23 ContainerCeilometerNotificationImage: 172.16.0.1:8787/rhosp-rhel8/openstack-ceilometer-notification:16.0
24 ContainerCephDaemonImage: 172.16.0.1:8787/rhceph/rhceph-4-rhel8:latest
25 ContainerCinderApiImage: 172.16.0.1:8787/rhosp-rhel8/openstack-cinder-api:16.0
26 ContainerCinderBackupImage: 172.16.0.1:8787/rhosp-rhel8/openstack-cinder-backup:16.0
27 ContainerCinderConfigImage: 172.16.0.1:8787/rhosp-rhel8/openstack-cinder-api:16.0
28 ContainerCinderSchedulerImage: 172.16.0.1:8787/rhosp-rhel8/openstack-cinder-scheduler:16.0
29 ContainerCinderVolumeImage: 172.16.0.1:8787/rhosp-rhel8/openstack-cinder-volume:16.0
30 ContainerClustercheckConfigImage: 172.16.0.1:8787/rhosp-rhel8/openstack-mariadb:16.0
31 ContainerClustercheckImage: 172.16.0.1:8787/rhosp-rhel8/openstack-mariadb:16.0
32 ContainerCollectdConfigImage: 172.16.0.1:8787/rhosp-rhel8/openstack-collectd:16.0
33 ContainerCollectdImage: 172.16.0.1:8787/rhosp-rhel8/openstack-collectd:16.0
34 ContainerCrondConfigImage: 172.16.0.1:8787/rhosp-rhel8/openstack-cron:16.0
35 ContainerCrondImage: 172.16.0.1:8787/rhosp-rhel8/openstack-cron:16.0
36 ContainerEc2ApiConfigImage: 172.16.0.1:8787/rhosp-rhel8/openstack-ec2-api:16.0
37 ContainerEc2ApiImage: 172.16.0.1:8787/rhosp-rhel8/openstack-ec2-api:16.0
38 ContainerEtcdConfigImage: 172.16.0.1:8787/rhosp-rhel8/openstack-etcd:16.0
39 ContainerEtcdImage: 172.16.0.1:8787/rhosp-rhel8/openstack-etcd:16.0
40 ContainerGlanceApiConfigImage: 172.16.0.1:8787/rhosp-rhel8/openstack-glance-api:16.0
41 ContainerGlanceApiImage: 172.16.0.1:8787/rhosp-rhel8/openstack-glance-api:16.0
42 ContainerGnocchiApiImage: 172.16.0.1:8787/rhosp-rhel8/openstack-gnocchi-api:16.0
43 ContainerGnocchiConfigImage: 172.16.0.1:8787/rhosp-rhel8/openstack-gnocchi-api:16.0
44 ContainerGnocchiMetricdImage: 172.16.0.1:8787/rhosp-rhel8/openstack-gnocchi-metricd:16.0
45 ContainerGnocchiStatsdImage: 172.16.0.1:8787/rhosp-rhel8/openstack-gnocchi-statsd:16.0
46 ContainerHAProxyConfigImage: 172.16.0.1:8787/rhosp-rhel8/openstack-haproxy:16.0
47 ContainerHAProxyImage: 172.16.0.1:8787/rhosp-rhel8/openstack-haproxy:16.0
48 ContainerHeatApiCfnConfigImage: 172.16.0.1:8787/rhosp-rhel8/openstack-heat-api-cfn:16.0
49 ContainerHeatApiCfnImage: 172.16.0.1:8787/rhosp-rhel8/openstack-heat-api-cfn:16.0
50 ContainerHeatApiConfigImage: 172.16.0.1:8787/rhosp-rhel8/openstack-heat-api:16.0
51 ContainerHeatApiImage: 172.16.0.1:8787/rhosp-rhel8/openstack-heat-api:16.0
52 ContainerHeatConfigImage: 172.16.0.1:8787/rhosp-rhel8/openstack-heat-api:16.0
53 ContainerHeatEngineImage: 172.16.0.1:8787/rhosp-rhel8/openstack-heat-engine:16.0
54 ContainerHorizonConfigImage: 172.16.0.1:8787/rhosp-rhel8/openstack-horizon:16.0
55 ContainerHorizonImage: 172.16.0.1:8787/rhosp-rhel8/openstack-horizon:16.0
56 ContainerIronicApiConfigImage: 172.16.0.1:8787/rhosp-rhel8/openstack-ironic-api:16.0
57 ContainerIronicApiImage: 172.16.0.1:8787/rhosp-rhel8/openstack-ironic-api:16.0
58 ContainerIronicConductorImage: 172.16.0.1:8787/rhosp-rhel8/openstack-ironic-conductor:16.0
59 ContainerIronicConfigImage: 172.16.0.1:8787/rhosp-rhel8/openstack-ironic-pxe:16.0
60 ContainerIronicInspectorConfigImage: 172.16.0.1:8787/rhosp-rhel8/openstack-ironic-inspector:16.0
61 ContainerIronicInspectorImage: 172.16.0.1:8787/rhosp-rhel8/openstack-ironic-inspector:16.0
62 ContainerIronicNeutronAgentImage: 172.16.0.1:8787/rhosp-rhel8/openstack-ironic-neutron-agent:16.0
63 ContainerIronicPxeImage: 172.16.0.1:8787/rhosp-rhel8/openstack-ironic-pxe:16.0
64 ContainerIscsidConfigImage: 172.16.0.1:8787/rhosp-rhel8/openstack-iscsid:16.0
65 ContainerIscsidImage: 172.16.0.1:8787/rhosp-rhel8/openstack-iscsid:16.0
66 ContainerKeepalivedConfigImage: 172.16.0.1:8787/rhosp-rhel8/openstack-keepalived:16.0
67 ContainerKeepalivedImage: 172.16.0.1:8787/rhosp-rhel8/openstack-keepalived:16.0
68 ContainerKeystoneConfigImage: 172.16.0.1:8787/rhosp-rhel8/openstack-keystone:16.0
69 ContainerKeystoneImage: 172.16.0.1:8787/rhosp-rhel8/openstack-keystone:16.0
70 ContainerManilaApiImage: 172.16.0.1:8787/rhosp-rhel8/openstack-manila-api:16.0
71 ContainerManilaConfigImage: 172.16.0.1:8787/rhosp-rhel8/openstack-manila-api:16.0
72 ContainerManilaSchedulerImage: 172.16.0.1:8787/rhosp-rhel8/openstack-manila-scheduler:16.0
73 ContainerManilaShareImage: 172.16.0.1:8787/rhosp-rhel8/openstack-manila-share:16.0
74 ContainerMemcachedConfigImage: 172.16.0.1:8787/rhosp-rhel8/openstack-memcached:16.0
75 ContainerMemcachedImage: 172.16.0.1:8787/rhosp-rhel8/openstack-memcached:16.0
76 ContainerMetricsQdrConfigImage: 172.16.0.1:8787/rhosp-rhel8/openstack-qdrouterd:16.0
77 ContainerMetricsQdrImage: 172.16.0.1:8787/rhosp-rhel8/openstack-qdrouterd:16.0
78 ContainerMistralApiImage: 172.16.0.1:8787/rhosp-rhel8/openstack-mistral-api:16.0
79 ContainerMistralConfigImage: 172.16.0.1:8787/rhosp-rhel8/openstack-mistral-api:16.0
80 ContainerMistralEngineImage: 172.16.0.1:8787/rhosp-rhel8/openstack-mistral-engine:16.0
81 ContainerMistralEventEngineImage: 172.16.0.1:8787/rhosp-rhel8/openstack-mistral-event-engine:16.0
82 ContainerMistralExecutorImage: 172.16.0.1:8787/rhosp-rhel8/openstack-mistral-executor:16.0
83 ContainerMultipathdConfigImage: 172.16.0.1:8787/rhosp-rhel8/openstack-multipathd:16.0
84 ContainerMultipathdImage: 172.16.0.1:8787/rhosp-rhel8/openstack-multipathd:16.0
85 ContainerMysqlClientConfigImage: 172.16.0.1:8787/rhosp-rhel8/openstack-mariadb:16.0
86 ContainerMysqlConfigImage: 172.16.0.1:8787/rhosp-rhel8/openstack-mariadb:16.0
87 ContainerMysqlImage: 172.16.0.1:8787/rhosp-rhel8/openstack-mariadb:16.0
88 ContainerNeutronApiImage: 172.16.0.1:8787/rhosp-rhel8/openstack-neutron-server-ovn:16.0
89 ContainerNeutronConfigImage: 172.16.0.1:8787/rhosp-rhel8/openstack-neutron-server-ovn:16.0
90 ContainerNeutronDHCPImage: 172.16.0.1:8787/rhosp-rhel8/openstack-neutron-dhcp-agent:16.0
91 ContainerNeutronL3AgentImage: 172.16.0.1:8787/rhosp-rhel8/openstack-neutron-l3-agent:16.0
92 ContainerNeutronMetadataImage: 172.16.0.1:8787/rhosp-rhel8/openstack-neutron-metadata-agent:16.0
93 ContainerNovaApiImage: 172.16.0.1:8787/rhosp-rhel8/openstack-nova-api:16.0
94 ContainerNovaComputeImage: 172.16.0.1:8787/rhosp-rhel8/openstack-nova-compute:16.0
95 ContainerNovaComputeIronicImage: 172.16.0.1:8787/rhosp-rhel8/openstack-nova-compute-ironic:16.0
96 ContainerNovaConductorImage: 172.16.0.1:8787/rhosp-rhel8/openstack-nova-conductor:16.0
97 ContainerNovaConfigImage: 172.16.0.1:8787/rhosp-rhel8/openstack-nova-api:16.0
98 ContainerNovaLibvirtConfigImage: 172.16.0.1:8787/rhosp-rhel8/openstack-nova-compute:16.0
99 ContainerNovaLibvirtImage: 172.16.0.1:8787/rhosp-rhel8/openstack-nova-libvirt:16.0
100 ContainerNovaMetadataConfigImage: 172.16.0.1:8787/rhosp-rhel8/openstack-nova-api:16.0
101 ContainerNovaMetadataImage: 172.16.0.1:8787/rhosp-rhel8/openstack-nova-api:16.0
102 ContainerNovaSchedulerImage: 172.16.0.1:8787/rhosp-rhel8/openstack-nova-scheduler:16.0
103 ContainerNovaVncProxyImage: 172.16.0.1:8787/rhosp-rhel8/openstack-nova-novncproxy:16.0
104 ContainerOctaviaApiImage: 172.16.0.1:8787/rhosp-rhel8/openstack-octavia-api:16.0
105 ContainerOctaviaConfigImage: 172.16.0.1:8787/rhosp-rhel8/openstack-octavia-api:16.0
106 ContainerOctaviaDriverAgentConfigImage: 172.16.0.1:8787/rhosp-rhel8/openstack-octavia-api:16.0
107 ContainerOctaviaDriverAgentImage: 172.16.0.1:8787/rhosp-rhel8/openstack-octavia-api:16.0
108 ContainerOctaviaHealthManagerImage: 172.16.0.1:8787/rhosp-rhel8/openstack-octavia-health-manager:16.0
109 ContainerOctaviaHousekeepingImage: 172.16.0.1:8787/rhosp-rhel8/openstack-octavia-housekeeping:16.0
110 ContainerOctaviaWorkerImage: 172.16.0.1:8787/rhosp-rhel8/openstack-octavia-worker:16.0
111 ContainerOpenvswitchImage: 172.16.0.1:8787/rhosp-rhel8/openstack-neutron-openvswitch-agent:16.0
112 ContainerOvnControllerConfigImage: 172.16.0.1:8787/rhosp-rhel8/openstack-ovn-controller:16.0
113 ContainerOvnControllerImage: 172.16.0.1:8787/rhosp-rhel8/openstack-ovn-controller:16.0
114 ContainerOvnDbsConfigImage: 172.16.0.1:8787/rhosp-rhel8/openstack-ovn-northd:16.0
115 ContainerOvnDbsImage: 172.16.0.1:8787/rhosp-rhel8/openstack-ovn-northd:16.0
116 ContainerOvnMetadataImage: 172.16.0.1:8787/rhosp-rhel8/openstack-neutron-metadata-agent-ovn:16.0
117 ContainerOvnNbDbImage: 172.16.0.1:8787/rhosp-rhel8/openstack-ovn-nb-db-server:16.0
118 ContainerOvnNorthdImage: 172.16.0.1:8787/rhosp-rhel8/openstack-ovn-northd:16.0
119 ContainerOvnSbDbImage: 172.16.0.1:8787/rhosp-rhel8/openstack-ovn-sb-db-server:16.0
120 ContainerPankoApiImage: 172.16.0.1:8787/rhosp-rhel8/openstack-panko-api:16.0
121 ContainerPankoConfigImage: 172.16.0.1:8787/rhosp-rhel8/openstack-panko-api:16.0
122 ContainerPlacementConfigImage: 172.16.0.1:8787/rhosp-rhel8/openstack-placement-api:16.0
123 ContainerPlacementImage: 172.16.0.1:8787/rhosp-rhel8/openstack-placement-api:16.0
124 ContainerQdrouterdConfigImage: 172.16.0.1:8787/rhosp-rhel8/openstack-qdrouterd:16.0
125 ContainerQdrouterdImage: 172.16.0.1:8787/rhosp-rhel8/openstack-qdrouterd:16.0
126 ContainerRabbitmqConfigImage: 172.16.0.1:8787/rhosp-rhel8/openstack-rabbitmq:16.0
127 ContainerRabbitmqImage: 172.16.0.1:8787/rhosp-rhel8/openstack-rabbitmq:16.0
128 ContainerRedisConfigImage: 172.16.0.1:8787/rhosp-rhel8/openstack-redis:16.0
129 ContainerRedisImage: 172.16.0.1:8787/rhosp-rhel8/openstack-redis:16.0
130 ContainerRsyslogConfigImage: 172.16.0.1:8787/rhosp-rhel8/openstack-rsyslog:16.0
131 ContainerRsyslogImage: 172.16.0.1:8787/rhosp-rhel8/openstack-rsyslog:16.0
132 ContainerRsyslogSidecarConfigImage: 172.16.0.1:8787/rhosp-rhel8/openstack-rsyslog:16.0
133 ContainerRsyslogSidecarImage: 172.16.0.1:8787/rhosp-rhel8/openstack-rsyslog:16.0
134 ContainerSwiftAccountImage: 172.16.0.1:8787/rhosp-rhel8/openstack-swift-account:16.0
135 ContainerSwiftConfigImage: 172.16.0.1:8787/rhosp-rhel8/openstack-swift-proxy-server:16.0
136 ContainerSwiftContainerImage: 172.16.0.1:8787/rhosp-rhel8/openstack-swift-container:16.0
137 ContainerSwiftObjectImage: 172.16.0.1:8787/rhosp-rhel8/openstack-swift-object:16.0
138 ContainerSwiftProxyImage: 172.16.0.1:8787/rhosp-rhel8/openstack-swift-proxy-server:16.0
139 ContainerZaqarConfigImage: 172.16.0.1:8787/rhosp-rhel8/openstack-zaqar-wsgi:16.0
140 ContainerZaqarImage: 172.16.0.1:8787/rhosp-rhel8/openstack-zaqar-wsgi:16.0
141 DockerInsecureRegistryAddress:
142 - 172.16.0.1:8787
143 GrafanaContainerImage: 172.16.0.1:8787/openshift4/ose-grafana:4.1
144 NodeExporterContainerImage: 172.16.0.1:8787/openshift4/ose-prometheus-node-exporter:v4.1
145 PrometheusContainerImage: 172.16.0.1:8787/openshift4/ose-prometheus:4.1
146
147
148$ ./templates/osp-16/overcloud-deploy-spl.sh 16
149START with options: overcloud deploy --timeout 360 --templates /usr/share/openstack-tripleo-heat-templates --verbose -n /home/stack/templates/osp-16/network_data_spl.yaml -r /home/stack/templates/osp-16/roles_data_spl.yaml -e /home/stack/templates/docker-registry.yaml -e /home/stack/templates/osp-16/node-count-spl.yaml -e /home/stack/templates/environments/global-config.yaml -e /usr/share/openstack-tripleo-heat-templates/environments/network-environment.yaml -e /home/stack/templates/osp-16/network-environment-spl.yaml -e /usr/share/openstack-tripleo-heat-templates/environments/network-isolation.yaml -e /usr/share/openstack-tripleo-heat-templates/environments/ceph-ansible/ceph-ansible.yaml -e /home/stack/templates/osp-16/ceph-config.yaml -e /home/stack/templates/environments/25-hostname-map.yaml -e /home/stack/templates/environments/30-fixed-ip-vips.yaml -e /home/stack/templates/osp-16/ips-from-pool-all-spl.yaml -e /home/stack/templates/environments/55-rsvd_host_memory.yaml --log-file /home/stack/overcloud-deploy.log
150command: overcloud deploy -> tripleoclient.v1.overcloud_deploy.DeployOvercloud (auth=True)
151Using auth plugin: password
152No stack found, will be doing a stack create
153Removing the current plan files
154Uploading new plan files
155Temporary Swift GET/PUT URL parameters have successfully been updated.
156Temporary Swift GET/PUT URL parameters have successfully been updated.
157The backup of the ceph-ansible fetch directory did not need to be renamed
158Plan updated.
159Processing templates in the directory /tmp/tripleoclient-3jt0ow20/tripleo-heat-templates
160WARNING: Following parameter(s) are defined but not currently used in the deployment plan. These parameters may be valid but not in use due to the service or deployment configuration. AlertManagerContainerImage, BondInterfaceOvsOptions, CephEdge1ControlPlaneSubnet, CephEdge1SchedulerHints, CephEdge2ControlPlaneSubnet, CinderBackupBackend, ComputeEdge1ControlPlaneSubnet, ComputeEdge1IPs, ComputeEdge1SchedulerHints, ComputeEdge2ControlPlaneSubnet, ComputeEdge2IPs, ComputeEdge2SchedulerHints, ComputeEdge3SchedulerHints, ComputeEdge4SchedulerHints, ComputeHCIIPs, ComputeHCISchedulerHints, ComputeInstanceHAIPs, ComputeInstanceHASchedulerHints, EnableRhcs4Beta, GrafanaContainerImage, HCIEdge1ControlPlaneSubnet, HCIEdge1IPs, HCIEdge2ControlPlaneSubnet, HCIEdge2IPs, HciEdge2SchedulerHints, NodeExporterContainerImage, OcProvisioningVirtualFixedIPs, OvercloudCephEdge1Flavor, OvercloudComputeEdge1Flavor, OvercloudComputeEdge2Flavor, OvercloudComputeEdge3Flavor, OvercloudComputeEdge4Flavor, OvercloudComputeHCIFlavor, OvercloudComputeInstanceHAFlavor, OvercloudHciEdge2Flavor, OvercloudServiceFlavor, PrometheusContainerImage, ServiceIPs, ServiceSchedulerHints, StorageNFSVirtualFixedIPs
161Deploying templates in the directory /tmp/tripleoclient-3jt0ow20/tripleo-heat-templates
162Initializing overcloud plan deployment
163{'deployment_status': 'DEPLOY_FAILED',
164 'execution_id': '20154b82-d2e2-4050-840a-62b50fbb4802',
165 'message': 'Unable to authenticate. This may indicate missing registry '
166 'credentials or the provided container or namespace does not '
167 'exist. 401 Client Error: Unauthorized for url: '
168 'https://registry.redhat.io/auth/realms/rhcc/protocol/redhat-docker-v2/auth?service=docker-registry&scope=repository%3Arhosp-rhel8%2Fopenstack-cinder-api%3Apull',
169 'plan_name': 'overcloud',
170 'root_execution_id': None,
171 'status': 'FAILED'}
172Unable to authenticate. This may indicate missing registry credentials or the provided container or namespace does not exist. 401 Client Error: Unauthorized for url: https://registry.redhat.io/auth/realms/rhcc/protocol/redhat-docker-v2/auth?service=docker-registry&scope=repository%3Arhosp-rhel8%2Fopenstack-cinder-api%3Apull
173Exception occurred while running the command
174Traceback (most recent call last):
175 File "/usr/lib/python3.6/site-packages/tripleoclient/command.py", line 32, in run
176 super(Command, self).run(parsed_args)
177 File "/usr/lib/python3.6/site-packages/osc_lib/command/command.py", line 41, in run
178 return super(Command, self).run(parsed_args)
179 File "/usr/lib/python3.6/site-packages/cliff/command.py", line 185, in run
180 return_code = self.take_action(parsed_args) or 0
181 File "/usr/lib/python3.6/site-packages/tripleoclient/v1/overcloud_deploy.py", line 955, in take_action
182 self._deploy_tripleo_heat_templates_tmpdir(stack, parsed_args)
183 File "/usr/lib/python3.6/site-packages/tripleoclient/v1/overcloud_deploy.py", line 374, in _deploy_tripleo_heat_templates_tmpdir
184 new_tht_root, tht_root)
185 File "/usr/lib/python3.6/site-packages/tripleoclient/v1/overcloud_deploy.py", line 475, in _deploy_tripleo_heat_templates
186 deployment_options=deployment_options)
187 File "/usr/lib/python3.6/site-packages/tripleoclient/v1/overcloud_deploy.py", line 494, in _try_overcloud_deploy_with_compat_yaml
188 deployment_options=deployment_options)
189 File "/usr/lib/python3.6/site-packages/tripleoclient/v1/overcloud_deploy.py", line 247, in _heat_deploy
190 deployment_options=deployment_options)
191 File "/usr/lib/python3.6/site-packages/tripleoclient/workflows/deployment.py", line 84, in deploy_and_wait
192 deploy(log, clients, **workflow_input)
193 File "/usr/lib/python3.6/site-packages/tripleoclient/workflows/deployment.py", line 66, in deploy
194 % (payload['status'], wf_name))
195ValueError: Unexpected status FAILED for tripleo.deployment.v1.deploy_plan
196Unexpected status FAILED for tripleo.deployment.v1.deploy_plan
197END return value: 1
198sys:1: ResourceWarning: unclosed <socket.socket fd=6, family=AddressFamily.AF_INET, type=SocketKind.SOCK_STREAM, proto=6, laddr=('172.16.0.1', 54166), raddr=('172.16.0.1', 8080)>
199sys:1: ResourceWarning: unclosed <socket.socket fd=4, family=AddressFamily.AF_INET, type=SocketKind.SOCK_STREAM, proto=6, laddr=('172.16.0.1', 60592), raddr=('172.16.0.1', 5000)>
200sys:1: ResourceWarning: unclosed <socket.socket fd=5, family=AddressFamily.AF_INET, type=SocketKind.SOCK_STREAM, proto=6, laddr=('172.16.0.1', 46556), raddr=('172.16.0.1', 8004)>
201sys:1: ResourceWarning: unclosed <socket.socket fd=7, family=AddressFamily.AF_INET, type=SocketKind.SOCK_STREAM, proto=6, laddr=('172.16.0.1', 39682), raddr=('172.16.0.1', 8989)>
202
203 CinderEtcdLocalConnect: True
204
205 # The Cinder service's storage availability zone.
206 # Type: string
207 CinderStorageAvailabilityZone: dcn
208
209 # The cluster name used for deploying the cinder-volume service in an active-active (A/A) configuration. This configuration requires the Cinder backend drivers support A/A, and the cinder-volume service not be managed by pacemaker. If these criteria are not met then the cluster name must be left blank.
210 # Type: string
211 CinderVolumeCluster: dcn
212
213 # Enable Glance Image Cache
214 # Type: boolean
215 GlanceCacheEnabled: False
216
217 # The upper limit on cache size, in bytes, after which the cache-pruner cleans up the image cache.
218 # Type: number
219 GlanceImageCacheMaxSize: 10737418240
220
221 # Manage the network and related resources (subnets and segments) with either create, update, or delete operations (depending on the stack operation). Does not apply to ports which will always be managed as needed. Defaults to true. For multi-stack use cases where the network related resources have already been managed by a separate stack, this parameter can be set to false.
222 # Type: boolean
223 ManageNetworks: False
224
225 # The availability zone where new Nova compute nodes will be added. If the zone does not already exist, it will be created. If left unset, it will default to the value of the stack name.
226 # Type: string
227 NovaComputeAvailabilityZone: ''
228
229
230for i in 71 72 73; do for j in b c d; do ssh heat-admin@172.16.0.$i sudo dd if=/dev/zero of=/dev/vd$j bs=1MB count=5; done ; done
231
232Feb 14 16:17:35 compute-1 puppet-user[25867]: Error: 'test -f /etc/pki/CA/certs/vnc.crt' returned 1 instead of one of [0]
233Feb 14 16:17:35 compute-1 puppet-user[25867]: Error: /Stage[main]/Tripleo::Profile::Base::Certmonger_user/Tripleo::Certmonger::Libvirt_vnc[libvirt-vnc-server-cert]/Exec[/etc/pki/CA/certs/vnc.crt]/returns: change from 'notrun' to ['0'] failed: 'test -f /etc/pki/CA/certs/vnc.crt' returned 1 instead of one of [0]
234Feb 14 16:17:35 compute-1 puppet-user[25867]: Notice: /Stage[main]/Tripleo::Profile::Base::Certmonger_user/Tripleo::Certmonger::Libvirt_vnc[libvirt-vnc-server-cert]/File[/etc/pki/CA/certs/vnc.crt]: Dependency Exec[/etc/pki/CA/certs/vnc.crt] has failures: true
235Feb 14 16:17:35 compute-1 puppet-user[25867]: Warning: /Stage[main]/Tripleo::Profile::Base::Certmonger_user/Tripleo::Certmonger::Libvirt_vnc[libvirt-vnc-server-cert]/File[/etc/pki/CA/certs/vnc.crt]: Skipping because of failed dependencies
236Feb 14 16:17:35 compute-1 puppet-user[25867]: Warning: /Stage[main]/Tripleo::Certmonger::Ca::Libvirt_vnc/File[/etc/pki/libvirt-vnc/ca-cert.pem]: Skipping because of failed dependencies
237Feb 14 16:17:35 compute-1 puppet-user[25867]: Notice: /Stage[main]/Tripleo::Profile::Base::Certmonger_user/Tripleo::Certmonger::Libvirt_vnc[libvirt-vnc-server-cert]/File[/etc/pki/libvirt-vnc/server-cert.pem]/mode: mode changed '0600' to '0644'
238Feb 14 16:17:35 compute-1 puppet-user[25867]: Notice: /Stage[main]/Tripleo::Profile::Base::Certmonger_user/Tripleo::Certmonger::Libvirt_vnc[libvirt-vnc-server-cert]/File[/etc/pki/libvirt-vnc/server-key.pem]/group: group changed 'root' to 'qemu'
239Feb 14 16:17:35 compute-1 puppet-user[25867]: Notice: /Stage[main]/Tripleo::Profile::Base::Certmonger_user/Tripleo::Certmonger::Libvirt_vnc[libvirt-vnc-server-cert]/File[/etc/pki/libvirt-vnc/server-key.pem]/mode: mode changed '0600' to '0640'
240Feb 14 16:17:35 compute-1 puppet-user[25867]: Notice: /Stage[main]/Tripleo::Profile::Base::Certmonger_user/Tripleo::Certmonger::Qemu[qemu-nbd-client-cert]/Certmonger_certificate[qemu-nbd-client-cert]/hostname: hostname changed '' to 'compute-1.internalapi.redhat.local'
241Feb 14 16:17:35 compute-1 puppet-user[25867]: Notice: /Stage[main]/Tripleo::Profile::Base::Certmonger_user/Tripleo::Certmonger::Qemu[qemu-nbd-client-cert]/Certmonger_certificate[qemu-nbd-client-cert]/principal: defined 'principal' as 'qemu/compute-1.internalapi.redhat.local'
242Feb 14 16:17:35 compute-1 puppet-user[25867]: Notice: /Stage[main]/Tripleo::Profile::Base::Certmonger_user/Tripleo::Certmonger::Qemu[qemu-nbd-client-cert]/Certmonger_certificate[qemu-nbd-client-cert]/dnsname: defined 'dnsname' as ['compute-1.internalapi.redhat.local']
243
2442020-02-18 02:28:33,165 p=28513 u=mistral | fatal: [hci-1-edge-1]: FAILED! => {"ansible_job_id": "278792794353.17640", "attempts": 7, "changed": true, "cmd": "set -o pipefail; puppet apply --modulepath=/etc/puppet/modules:/opt/stack/puppet-modules:/usr/share/openstack-puppet/modules --detailed-exitcodes --summarize --color=false /var/lib/tripleo-config/puppet_step_config.pp 2>&1 | logger -s -t puppet-user", "delta": "0:00:19.213396", "end": "2020-02-18 07:28:32.572889", "failed_when_result": true, "finished": 1, "msg": "non-zero return code", "rc": 6, "start": "2020-02-18 07:28:13.359493", "stderr": "<13>Feb 18 07:28:13 puppet-user: Warning: The function 'hiera' is deprecated in favor of using 'lookup'. See https://puppet.com/docs/puppet/5.5/deprecated_language.html\\n (file & line not available)\n<13>Feb 18 07:28:18 puppet-user: Warning: /etc/puppet/hiera.yaml: Use of 'hiera.yaml' version 3 is deprecated. It should be converted to version 5\n<13>Feb 18 07:28:18 puppet-user: (file: /etc/puppet/hiera.yaml)\n<13>Feb 18 07:28:18 puppet-user: Warning: Undefined variable '::deploy_config_name'; \\n (file & line not available)\n<13>Feb 18 07:28:18 puppet-user: Warning: ModuleLoader: module 'tripleo' has unresolved dependencies - it will only see those that are resolved. Use 'puppet module list --tree' to see information about modules\\n (file & line not available)\n<13>Feb 18 07:28:19 puppet-user: Warning: Unknown variable: '::deployment_type'. (file: /etc/puppet/modules/tripleo/manifests/profile/base/database/mysql/client.pp, line: 89, column: 8)\n<13>Feb 18 07:28:19 puppet-user: Warning: ModuleLoader: module 'concat' has unresolved dependencies - it will only see those that are resolved. 
Use 'puppet module list --tree' to see information about modules\\n (file & line not available)\n<13>Feb 18 07:28:19 puppet-user: Warning: tag is a metaparam; this value will inherit to all contained resources in the tripleo::firewall::rule definition\n<13>Feb 18 07:28:19 puppet-user: Notice: Scope(Class[Tripleo::Firewall::Post]): At this stage, all network traffic is blocked.\n<13>Feb 18 07:28:19 puppet-user: Warning: ModuleLoader: module 'nova' has unresolved dependencies - it will only see those that are resolved. Use 'puppet module list --tree' to see information about modules\\n (file & line not available)\n<13>Feb 18 07:28:19 puppet-user: Warning: ModuleLoader: module 'openstacklib' has unresolved dependencies - it will only see those that are resolved. Use 'puppet module list --tree' to see information about modules\\n (file & line not available)\n<13>Feb 18 07:28:19 puppet-user: Notice: Compiled catalog for hci-1-edge-1.redhat.local in environment production in 0.81 seconds\n<13>Feb 18 07:28:19 puppet-user: Notice: /Stage[main]/Main/Package_manifest[/var/lib/tripleo/installed-packages/overcloud_HCIEdge11]/ensure: created\n<13>Feb 18 07:28:20 puppet-user: Notice: /Stage[main]/Certmonger/Service[certmonger]/ensure: ensure changed 'stopped' to 'running'\n<13>Feb 18 07:28:20 puppet-user: Notice: /Stage[main]/Tripleo::Certmonger::Libvirt_dirs/File[/etc/pki/libvirt]/ensure: created\n<13>Feb 18 07:28:20 puppet-user: Notice: /Stage[main]/Tripleo::Certmonger::Libvirt_dirs/File[/etc/pki/libvirt/private]/ensure: created\n<13>Feb 18 07:28:20 puppet-user: Notice: /Stage[main]/Tripleo::Certmonger::Libvirt_vnc_dirs/File[/etc/pki/libvirt-vnc]/ensure: created\n<13>Feb 18 07:28:20 puppet-user: Notice: /Stage[main]/Tripleo::Certmonger::Qemu_dirs/File[/etc/pki/qemu]/ensure: created\n<13>Feb 18 07:28:20 puppet-user: Notice: /Stage[main]/Tripleo::Certmonger::Ca::Qemu/File[/etc/pki/qemu/ca-cert.pem]/ensure: created\n<13>Feb 18 07:28:20 puppet-user: Notice: 
/Stage[main]/Tripleo::Certmonger::Qemu_nbd_dirs/File[/etc/pki/libvirt-nbd]/ensure: created\n<13>Feb 18 07:28:20 puppet-user: Notice: /Stage[main]/Tripleo::Certmonger::Etcd/Certmonger_certificate[etcd]/ensure: created\n<13>Feb 18 07:28:21 puppet-user: Error: Could not find user etcd\n<13>Feb 18 07:28:21 puppet-user: Error: /Stage[main]/Tripleo::Certmonger::Etcd/File[/etc/pki/tls/certs/etcd.crt]/owner: change from 'root' to 'etcd' failed: Could not find user etcd\n<13>Feb 18 07:28:21 puppet-user: Error: Could not find group etcd\n<13>Feb 18 07:28:21 puppet-user: Error: /Stage[main]/Tripleo::Certmonger::Etcd/File[/etc/pki/tls/certs/etcd.crt]/group: change from 'root' to 'etcd' failed: Could not find group etcd\n<13>Feb 18 07:28:21 puppet-user: Error: Could not find user etcd\n<13>Feb 18 07:28:21 puppet-user: Error: /Stage[main]/Tripleo::Certmonger::Etcd/File[/etc/pki/tls/private/etcd.key]/owner: change from 'root' to 'etcd' failed: Could not find user etcd\n<13>Feb 18 07:28:21 puppet-user: Error: Could not find group etcd\n<13>Feb 18 07:28:21 puppet-user: Error: /Stage[main]/Tripleo::Certmonger::Etcd/File[/etc/pki/tls/private/etcd.key]/group: change from 'root' to 'etcd' failed: Could not find group etcd\n<13>Feb 18 07:28:21 puppet-user: Notice: /Stage[main]/Tripleo::Certmonger::Ovn_controller/Certmonger_certificate[ovn_controller]/ensure: created\n<13>Feb 18 07:28:22 puppet-user: Notice: /Stage[main]/Tripleo::Certmonger::Ovn_metadata/Certmonger_certificate[ovn_metadata]/ensure: created\n<13>Feb 18 07:28:23 puppet-user: Notice: /Stage[main]/Tripleo::Profile::Base::Database::Mysql::Client/File[/etc/my.cnf.d/tripleo.cnf]/ensure: created\n<13>Feb 18 07:28:23 puppet-user: Notice: /Stage[main]/Tripleo::Profile::Base::Database::Mysql::Client/Augeas[tripleo-mysql-client-conf]/returns: executed successfully\n<13>Feb 18 07:28:23 puppet-user: Notice: /Stage[main]/Tripleo::Firewall/Exec[save ipv4 rules]/returns: executed successfully\n<13>Feb 18 07:28:23 puppet-user: Notice: 
/Stage[main]/Tripleo::Firewall/Exec[save ipv6 rules]/returns: executed successfully\n<13>Feb 18 07:28:23 puppet-user: Notice: /Stage[main]/Firewall::Linux::Redhat/Service[iptables]/ensure: ensure changed 'stopped' to 'running'\n<13>Feb 18 07:28:24 puppet-user: Notice: /Stage[main]/Firewall::Linux::Redhat/Service[nftables]/ensure: ensure changed 'stopped' to 'running'\n<13>Feb 18 07:28:24 puppet-user: Notice: /Stage[main]/Firewall::Linux::Redhat/Service[ip6tables]/ensure: ensure changed 'stopped' to 'running'\n<13>Feb 18 07:28:24 puppet-user: Notice: /Stage[main]/Tripleo::Trusted_cas/Tripleo::Trusted_ca[undercloud-ca]/File[/etc/pki/ca-trust/source/anchors/undercloud-ca.pem]/ensure: defined content as '{md5}d1d347f9b3a67b9633a63752ab53310f'\n<13>Feb 18 07:28:25 puppet-user: Notice: /Stage[main]/Tripleo::Trusted_cas/Tripleo::Trusted_ca[undercloud-ca]/Exec[trust-ca-undercloud-ca]: Triggered 'refresh' from 1 event\n<13>Feb 18 07:28:25 puppet-user: Notice: /Stage[main]/Tripleo::Profile::Base::Certmonger_user/Tripleo::Certmonger::Libvirt[libvirt-client-cert]/Certmonger_certificate[libvirt-client-cert]/ensure: created\n<13>Feb 18 07:28:25 puppet-user: Notice: /Stage[main]/Tripleo::Profile::Base::Certmonger_user/Tripleo::Certmonger::Libvirt[libvirt-server-cert]/Certmonger_certificate[libvirt-server-cert]/ensure: created\n<13>Feb 18 07:28:26 puppet-user: Notice: /Stage[main]/Tripleo::Profile::Base::Certmonger_user/Tripleo::Certmonger::Libvirt_vnc[libvirt-vnc-server-cert]/Certmonger_certificate[libvirt-vnc-server-cert]/ensure: created\n<13>Feb 18 07:28:27 puppet-user: Notice: /Stage[main]/Tripleo::Profile::Base::Certmonger_user/Tripleo::Certmonger::Libvirt_vnc[libvirt-vnc-server-cert]/File[/etc/pki/CA/certs/vnc.crt]/mode: mode changed '0600' to '0644'\n<13>Feb 18 07:28:27 puppet-user: Notice: /Stage[main]/Tripleo::Certmonger::Ca::Libvirt_vnc/File[/etc/pki/libvirt-vnc/ca-cert.pem]/ensure: created\n<13>Feb 18 07:28:27 puppet-user: Notice: 
/Stage[main]/Tripleo::Profile::Base::Certmonger_user/Tripleo::Certmonger::Libvirt_vnc[libvirt-vnc-server-cert]/File[/etc/pki/libvirt-vnc/server-cert.pem]/mode: mode changed '0600' to '0644'\n<13>Feb 18 07:28:27 puppet-user: Notice: /Stage[main]/Tripleo::Profile::Base::Certmonger_user/Tripleo::Certmonger::Libvirt_vnc[libvirt-vnc-server-cert]/File[/etc/pki/libvirt-vnc/server-key.pem]/group: group changed 'root' to 'qemu'\n<13>Feb 18 07:28:27 puppet-user: Notice: /Stage[main]/Tripleo::Profile::Base::Certmonger_user/Tripleo::Certmonger::Libvirt_vnc[libvirt-vnc-server-cert]/File[/etc/pki/libvirt-vnc/server-key.pem]/mode: mode changed '0600' to '0640'\n<13>Feb 18 07:28:27 puppet-user: Notice: /Stage[main]/Tripleo::Profile::Base::Certmonger_user/Tripleo::Certmonger::Qemu[qemu-nbd-client-cert]/Certmonger_certificate[qemu-nbd-client-cert]/ensure: created\n<13>Feb 18 07:28:28 puppet-user: Notice: /Stage[main]/Tripleo::Profile::Base::Certmonger_user/Tripleo::Certmonger::Qemu[qemu-nbd-client-cert]/File[/etc/pki/libvirt-nbd/client-cert.pem]/mode: mode changed '0600' to '0644'\n<13>Feb 18 07:28:28 puppet-user: Notice: /Stage[main]/Tripleo::Profile::Base::Certmonger_user/Tripleo::Certmonger::Qemu[qemu-nbd-client-cert]/File[/etc/pki/libvirt-nbd/client-key.pem]/group: group changed 'root' to 'qemu'\n<13>Feb 18 07:28:28 puppet-user: Notice: /Stage[main]/Tripleo::Profile::Base::Certmonger_user/Tripleo::Certmonger::Qemu[qemu-nbd-client-cert]/File[/etc/pki/libvirt-nbd/client-key.pem]/mode: mode changed '0600' to '0640'\n<13>Feb 18 07:28:28 puppet-user: Notice: /Stage[main]/Tripleo::Profile::Base::Certmonger_user/Tripleo::Certmonger::Qemu[qemu-server-cert]/Certmonger_certificate[qemu-server-cert]/ensure: created\n<13>Feb 18 07:28:28 puppet-user: Notice: /Stage[main]/Tripleo::Certmonger::Ca::Crl/Exec[tripleo-ca-crl]/returns: executed successfully\n<13>Feb 18 07:28:28 puppet-user: Notice: /Stage[main]/Tripleo::Certmonger::Ca::Crl/File[tripleo-ca-crl-file]/seluser: seluser changed 
'unconfined_u' to 'system_u'\n<13>Feb 18 07:28:28 puppet-user: Notice: /Stage[main]/Tripleo::Certmonger::Ca::Crl/Exec[tripleo-ca-crl-process-command]: Triggered 'refresh' from 2 events\n<13>Feb 18 07:28:28 puppet-user: Notice: /Stage[main]/Tripleo::Certmonger::Ca::Crl/Cron[tripleo-refresh-crl-file]/ensure: created\n<13>Feb 18 07:28:28 puppet-user: Notice: /Stage[main]/Tripleo::Profile::Base::Certmonger_user/Tripleo::Certmonger::Qemu[qemu-server-cert]/File[/etc/pki/qemu/server-cert.pem]/mode: mode changed '0600' to '0644'\n<13>Feb 18 07:28:28 puppet-user: Notice: /Stage[main]/Tripleo::Profile::Base::Certmonger_user/Tripleo::Certmonger::Qemu[qemu-server-cert]/File[/etc/pki/qemu/server-key.pem]/group: group changed 'root' to 'qemu'\n<13>Feb 18 07:28:28 puppet-user: Notice: /Stage[main]/Tripleo::Profile::Base::Certmonger_user/Tripleo::Certmonger::Qemu[qemu-server-cert]/File[/etc/pki/qemu/server-key.pem]/mode: mode changed '0600' to '0640'\n<13>Feb 18 07:28:28 puppet-user: Notice: /Stage[main]/Ssh::Server::Config/Concat[/etc/ssh/sshd_config]/File[/etc/ssh/sshd_config]/content: content changed '{md5}9f750a19eee2319c02a32909223b7141' to '{md5}3534841fdb8db5b58d66600a60bf3759'\n<13>Feb 18 07:28:28 puppet-user: Notice: /Stage[main]/Ssh::Server::Service/Service[sshd]: Triggered 'refresh' from 2 events\n<13>Feb 18 07:28:28 puppet-user: Notice: /Stage[main]/Tripleo::Firewall::Pre/Tripleo::Firewall::Rule[000 accept related established rules]/Firewall[000 accept related established rules ipv4]/ensure: created\n<13>Feb 18 07:28:28 puppet-user: Notice: /Stage[main]/Tripleo::Firewall::Pre/Tripleo::Firewall::Rule[000 accept related established rules]/Firewall[000 accept related established rules ipv6]/ensure: created\n<13>Feb 18 07:28:28 puppet-user: Notice: /Stage[main]/Tripleo::Firewall::Pre/Tripleo::Firewall::Rule[001 accept all icmp]/Firewall[001 accept all icmp ipv4]/ensure: created\n<13>Feb 18 07:28:29 puppet-user: Notice: 
/Stage[main]/Tripleo::Firewall::Pre/Tripleo::Firewall::Rule[001 accept all icmp]/Firewall[001 accept all icmp ipv6]/ensure: created\n<13>Feb 18 07:28:29 puppet-user: Notice: /Stage[main]/Tripleo::Firewall::Pre/Tripleo::Firewall::Rule[002 accept all to lo interface]/Firewall[002 accept all to lo interface ipv4]/ensure: created\n<13>Feb 18 07:28:29 puppet-user: Notice: /Stage[main]/Tripleo::Firewall::Pre/Tripleo::Firewall::Rule[002 accept all to lo interface]/Firewall[002 accept all to lo interface ipv6]/ensure: created\n<13>Feb 18 07:28:29 puppet-user: Notice: /Stage[main]/Tripleo::Firewall::Pre/Tripleo::Firewall::Rule[004 accept ipv6 dhcpv6]/Firewall[004 accept ipv6 dhcpv6 ipv6]/ensure: created\n<13>Feb 18 07:28:29 puppet-user: Notice: /Stage[main]/Tripleo::Firewall
245
246Feb 20 16:19:47 puppet-user: Notice: /Stage[main]/Tripleo::Certmonger::Ca::Crl/Exec[tripleo-ca-crl]/returns: executed successfully\n<13>Feb 20 16:19:47 puppet-user: Notice: /Stage[main]/Tripleo::Certmonger::Ca::Crl/File[tripleo-ca-crl-file]/seluser: seluser changed 'unconfined_u' to 'system_u'\n<13>Feb 20 16:19:47 puppet-user: Notice: /Stage[main]/Tripleo::Certmonger::Ca::Crl/Exec[tripleo-ca-crl-process-command]: Triggered 'refresh' from 2 events\n<13>Feb 20 16:19:47 puppet-user: Notice: /Stage[main]/Tripleo::Certmonger::Ca::Crl/Cron[tripleo-refresh-crl-file]/ensure: created\n<13>Feb 20 16:19:50 puppet-user: Notice: /Stage[main]/Pacemaker::Stonith/Pacemaker::Property[Disable STONITH]/Pcmk_property[property--stonith-enabled]/ensure: created\n<13>Feb 20 16:24:54 puppet-user: Error: pcs create failed: Error: Unable to communicate with computeiha-1\n<13>Feb 20 16:24:54 puppet-user: Error: /Stage[main]/Tripleo::Profile::Base::Pacemaker/Pacemaker::Resource::Remote[computeiha-1]/Pcmk_remote[computeiha-1]/ensure: change from 'absent' to 'present' failed: pcs create failed: Error: Unable to communicate with computeiha-1\n<13>Feb 20 16:24:54 puppet-user: Notice: /Stage[main]/Tripleo::Profile::Base::Pacemaker/Exec[exec-wait-for-computeiha-1]: Dependency Pcmk_remote[computeiha-1] has failures: true\n<13>Feb 20 16:24:54 puppet-user: Warning: /Stage[main]/Tripleo::Profile::Base::Pacemaker/Exec[exec-wait-for-computeiha-1]: Skipping because of failed dependencies\n<13>Feb 20 16:29:59 puppet-user: Error: pcs create failed: Error: Unable to communicate with computeiha-2\n<13>Feb 20 16:29:59 puppet-user: Error: /Stage[main]/Tripleo::Profile::Base::Pacemaker/Pacemaker::Resource::Remote[computeiha-2]/Pcmk_remote[computeiha-2]/ensure: change from 'absent' to 'present' failed: pcs create failed: Error: Unable to communicate with computeiha-2\n<13>Feb 20 16:29:59 puppet-user: Notice: /Stage[main]/Tripleo::Profile::Base::Pacemaker/Exec[exec-wait-for-computeiha-2]: Dependency 
Pcmk_remote[computeiha-2] has failures: true\n<13>Feb 20 16:29:59 puppet-user: Warning: /Stage[main]/Tripleo::Profile::Base::Pacemaker/Exec[exec-wait-for-com
247
248Feb 20 16:24:54 controller-1 puppet-user[24152]: Error: pcs create failed: Error: Unable to communicate with computeiha-1
249Feb 20 16:24:54 controller-1 puppet-user[24152]: Error: /Stage[main]/Tripleo::Profile::Base::Pacemaker/Pacemaker::Resource::Remote[computeiha-1]/Pcmk_remote[computeiha-1]/ensure: change from 'absent' to 'present' failed: pcs create failed: Error: Unable to communicate with computeiha-1
250Feb 20 16:24:54 controller-1 puppet-user[24152]: Notice: /Stage[main]/Tripleo::Profile::Base::Pacemaker/Exec[exec-wait-for-computeiha-1]: Dependency Pcmk_remote[computeiha-1] has failures: true
251Feb 20 16:24:54 controller-1 puppet-user[24152]: Warning: /Stage[main]/Tripleo::Profile::Base::Pacemaker/Exec[exec-wait-for-computeiha-1]: Skipping because of failed dependencies
252
253Feb 20 16:29:59 controller-1 puppet-user[24152]: Error: pcs create failed: Error: Unable to communicate with computeiha-2
254Feb 20 16:29:59 controller-1 puppet-user[24152]: Error: /Stage[main]/Tripleo::Profile::Base::Pacemaker/Pacemaker::Resource::Remote[computeiha-2]/Pcmk_remote[computeiha-2]/ensure: change from 'absent' to 'present' failed: pcs create failed: Error: Unable to communicate with computeiha-2
255Feb 20 16:29:59 controller-1 puppet-user[24152]: Notice: /Stage[main]/Tripleo::Profile::Base::Pacemaker/Exec[exec-wait-for-computeiha-2]: Dependency Pcmk_remote[computeiha-2] has failures: true
256Feb 20 16:29:59 controller-1 puppet-user[24152]: Warning: /Stage[main]/Tripleo::Profile::Base::Pacemaker/Exec[exec-wait-for-computeiha-2]: Skipping because of failed dependencies
257
258Mar 4 01:32:54 controller-2 podman[648242]: exec: PID 57: spawning /usr/bin/ganesha.nfsd -F -L STDOUT
259Mar 4 01:32:54 controller-2 podman[648242]: exec: Waiting 57 to quit
260Mar 4 01:32:54 controller-2 podman[648242]: 04/03/2020 01:32:54 : epoch 5e5f0546 : controller-2.redhat.local : ganesha.nfsd-57[main] main :MAIN :EVENT :ganesha.nfsd Starting: Ganesha Version /builddir/build/BUILD/nfs-ganesha-2.8.3/src, built at Jan 17 2020 20:56:03 on
261Mar 4 01:32:54 controller-2 podman[648242]: 04/03/2020 01:32:54 : epoch 5e5f0546 : controller-2.redhat.local : ganesha.nfsd-57[main] load_rados_config :CONFIG :CRIT :Unknown urls backend
262Mar 4 01:32:54 controller-2 podman[648242]: 04/03/2020 01:32:54 : epoch 5e5f0546 : controller-2.redhat.local : ganesha.nfsd-57[main] main :NFS STARTUP :CRIT :Error (token scan) while parsing (/etc/ganesha/ganesha.conf)
263Mar 4 01:32:54 controller-2 podman[648242]: 04/03/2020 01:32:54 : epoch 5e5f0546 : controller-2.redhat.local : ganesha.nfsd-57[main] config_errs_to_log :CONFIG :CRIT :Config File (/etc/ganesha/ganesha.conf:24): new url (rados://manila_data/ganesha-export-index) open error (Success), ignored
264Mar 4 01:32:54 controller-2 podman[648242]: 04/03/2020 01:32:54 : epoch 5e5f0546 : controller-2.redhat.local : ganesha.nfsd-57[main] main :NFS STARTUP :FATAL :Fatal errors. Server exiting...
265Mar 4 01:32:54 controller-2 podman[648242]: teardown: managing teardown after SIGCHLD
266Mar 4 01:32:54 controller-2 podman[648242]: teardown: Waiting PID 57 to terminate
267Mar 4 01:32:54 controller-2 podman[648242]: teardown: Process 57 is terminated
268Mar 4 01:32:54 controller-2 podman[648242]: teardown: Bye Bye, container will die with return code 0
269
270Mar 4 01:32:54 controller-2 podman[648242]: 2020-03-04 01:32:54.559244131 +0000 UTC m=+0.543639734 container died f1701c35ab4f3d77546d414c445f8068a557a2c25ccd188e59e3b2ed12da9ffb (image=172.16.0.1:8787/rhceph/rhceph-4-rhel8:4-15, name=ceph-nfs-pacemaker)
271Mar 4 01:32:54 controller-2 podman[648242]: 2020-03-04 01:32:54.606329773 +0000 UTC m=+0.590725407 container remove f1701c35ab4f3d77546d414c445f8068a557a2c25ccd188e59e3b2ed12da9ffb (image=172.16.0.1:8787/rhceph/rhceph-4-rhel8:4-15, name=ceph-nfs-pacemaker)
272Mar 4 01:32:54 controller-2 podman[648528]: Error: no container with name or ID ceph-nfs-pacemaker found: no such container
273Mar 4 01:32:54 controller-2 podman[60641]: audit 2020-03-04 01:32:54.261251 mon.controller-3 (mon.2) 4211 : audit [DBG] from='client.? 172.18.0.61:0/3709981479' entity='client.openstack' cmd=[{"prefix": "df", "format": "json"}]: dispatch
274Mar 4 01:32:54 controller-2 podman[60641]: audit 2020-03-04 01:32:54.736883 mon.controller-1 (mon.0) 5579 : audit [DBG] from='client.? 172.18.0.61:0/4229001602' entity='client.openstack' cmd=[{"prefix": "df", "format": "json"}]: dispatch
275Mar 4 01:32:55 controller-2 pacemaker-controld[23191]: notice: Result of start operation for ceph-nfs on controller-2: 1 (unknown error)
276Mar 4 01:32:55 controller-2 pacemaker-controld[23191]: warning: Action 269 (ceph-nfs_start_0) on controller-2 failed (target: 0 vs. rc: 1): Error
277Mar 4 01:32:55 controller-2 pacemaker-controld[23191]: notice: Transition 309 aborted by operation ceph-nfs_start_0 'modify' on controller-2: Event failed
278Mar 4 01:32:55 controller-2 pacemaker-controld[23191]: notice: Transition 309 (Complete=5, Pending=0, Fired=0, Skipped=0, Incomplete=5, Source=/var/lib/pacemaker/pengine/pe-input-129.bz2): Complete
279Mar 4 01:32:55 controller-2 pacemaker-schedulerd[23190]: warning: Processing failed start of ceph-nfs on controller-2: unknown error
280Mar 4 01:32:55 controller-2 pacemaker-schedulerd[23190]: warning: Processing failed start of ceph-nfs on controller-2: unknown error
281Mar 4 01:32:55 controller-2 pacemaker-schedulerd[23190]: warning: Processing failed start of ceph-nfs on controller-3: unknown error
282Mar 4 01:32:55 controller-2 pacemaker-schedulerd[23190]: warning: Processing failed start of ceph-nfs on controller-1: unknown error
283Mar 4 01:32:55 controller-2 pacemaker-schedulerd[23190]: warning: Forcing ceph-nfs away from controller-1 after 1000000 failures (max=1000000)
284Mar 4 01:32:55 controller-2 pacemaker-schedulerd[23190]: warning: Forcing ceph-nfs away from controller-3 after 1000000 failures (max=1000000)
285Mar 4 01:32:55 controller-2 pacemaker-schedulerd[23190]: notice: * Start openstack-manila-share-podman-0 ( controller-2 )
286Mar 4 01:32:55 controller-2 pacemaker-schedulerd[23190]: notice: * Recover ceph-nfs ( controller-2 )
287Mar 4 01:32:55 controller-2 pacemaker-schedulerd[23190]: notice: Calculated transition 310, saving inputs in /var/lib/pacemaker/pengine/pe-input-130.bz2
288Mar 4 01:32:56 controller-2 pacemaker-schedulerd[23190]: warning: Processing failed start of ceph-nfs on controller-2: unknown error
289Mar 4 01:32:56 controller-2 pacemaker-schedulerd[23190]: warning: Processing failed start of ceph-nfs on controller-2: unknown error
290Mar 4 01:32:56 controller-2 pacemaker-schedulerd[23190]: warning: Processing failed start of ceph-nfs on controller-3: unknown error
291Mar 4 01:32:56 controller-2 pacemaker-schedulerd[23190]: warning: Processing failed start of ceph-nfs on controller-1: unknown error
292Mar 4 01:32:56 controller-2 pacemaker-schedulerd[23190]: warning: Forcing ceph-nfs away from controller-1 after 1000000 failures (max=1000000)
293Mar 4 01:32:56 controller-2 pacemaker-schedulerd[23190]: warning: Forcing ceph-nfs away from controller-2 after 1000000 failures (max=1000000)
294Mar 4 01:32:56 controller-2 pacemaker-schedulerd[23190]: warning: Forcing ceph-nfs away from controller-3 after 1000000 failures (max=1000000)
295
296ansible-playbook -i inventory redeploy-normal.yaml -e "ssl=0 dcn=1 osp_version=16 username=rhn-support-sputhenp password=<REDACTED> split_stack=1 skip_deploy=1"
297
2982020-03-10 11:04:02.924 24 INFO ovsdbapp.backend.ovs_idl.idlutils [-] Could not retrieve schema from tcp:172.20.0.104:6641: Unknown error -1
2992020-03-10 11:07:02.935 23 INFO ovsdbapp.backend.ovs_idl.idlutils [-] Could not retrieve schema from tcp:172.20.0.104:6641: Unknown error -1
3002020-03-10 11:07:03.029 24 INFO ovsdbapp.backend.ovs_idl.idlutils [-] Could not retrieve schema from tcp:172.20.0.104:6641: Unknown error -1
301
302
303
304(overcloud) [stack@undercloud ~]$ openstack loadbalancer create --name lb1 --vip-subnet-id private-1-subnet-1
305Expecting value: line 1 column 1 (char 0)
306(overcloud) [stack@undercloud ~]$ openstack --debug loadbalancer create --name lb1 --vip-subnet-id private-1-subnet-1
307START with options: --debug loadbalancer create --name lb1 --vip-subnet-id private-1-subnet-1
308options: Namespace(access_token='***', access_token_endpoint='', access_token_type='', aodh_endpoint='', application_credential_id='', application_credential_name='', application_credential_secret='***', auth_methods='', auth_type='password', auth_url='https://overcloud.redhat.local:13000', cacert=None, cert='', client_id='', client_secret='***', cloud='', code='', debug=True, default_domain='default', default_domain_id='', default_domain_name='', deferred_help=False, discovery_endpoint='', domain_id='', domain_name='', endpoint='', identity_provider='', identity_provider_url='', insecure=None, inspector_api_version='1', inspector_url=None, interface='public', key='', log_file=None, openid_scope='', os_alarming_api_version='2', os_baremetal_api_version='1.58', os_beta_command=False, os_compute_api_version='2.latest', os_container_infra_api_version='1', os_data_processing_api_version='1.1', os_data_processing_url='', os_database_api_version='1', os_dns_api_version='2', os_event_api_version='2', os_identity_api_version='3', os_image_api_version='2', os_key_manager_api_version='1', os_loadbalancer_api_version='2.0', os_metrics_api_version='1', os_network_api_version='', os_object_api_version='', os_orchestration_api_version='1', os_project_id=None, os_project_name=None, os_queues_api_version='2', os_tripleoclient_api_version='1', os_volume_api_version='3', os_workflow_api_version='2', passcode='', password='***', profile='', project_domain_id='', project_domain_name='Default', project_id='', project_name='admin', protocol='', redirect_uri='', region_name='regionOne', remote_project_domain_id='', remote_project_domain_name='', remote_project_id='', remote_project_name='', roles='', service_provider='', service_provider_endpoint='', service_provider_entity_id='', system_scope='', timing=False, token='***', trust_id='', user='', user_domain_id='', user_domain_name='Default', user_id='', username='admin', verbose_level=3, verify=None)
309Auth plugin password selected
310auth_config_hook(): {'api_timeout': None, 'verify': True, 'cacert': None, 'cert': None, 'key': None, 'baremetal_status_code_retries': '5', 'baremetal_introspection_status_code_retries': '5', 'image_status_code_retries': '5', 'disable_vendor_agent': {}, 'interface': 'public', 'floating_ip_source': 'neutron', 'image_api_use_tasks': False, 'image_format': 'qcow2', 'message': '', 'network_api_version': '2', 'object_store_api_version': '1', 'secgroup_source': 'neutron', 'status': 'active', 'auth': {'user_domain_name': 'Default', 'project_domain_name': 'Default', 'project_name': 'admin'}, 'verbose_level': 3, 'deferred_help': False, 'debug': True, 'region_name': 'regionOne', 'default_domain': 'default', 'timing': False, 'inspector_api_version': '1', 'auth_url': 'https://overcloud.redhat.local:13000', 'username': 'admin', 'password': '***', 'beta_command': False, 'compute_api_version': '2.latest', 'identity_api_version': '3', 'image_api_version': '2', 'volume_api_version': '3', 'queues_api_version': '2', 'database_api_version': '1', 'tripleoclient_api_version': '1', 'data_processing_api_version': '1.1', 'loadbalancer_api_version': '2.0', 'workflow_api_version': '2', 'container_infra_api_version': '1', 'baremetal_api_version': '1.58', 'orchestration_api_version': '1', 'dns_api_version': '2', 'key_manager_api_version': '1', 'event_api_version': '2', 'metrics_api_version': '1', 'alarming_api_version': '2', 'auth_type': 'password', 'networks': []}
311defaults: {'api_timeout': None, 'verify': True, 'cacert': None, 'cert': None, 'key': None, 'auth_type': 'password', 'baremetal_status_code_retries': 5, 'baremetal_introspection_status_code_retries': 5, 'image_status_code_retries': 5, 'disable_vendor_agent': {}, 'interface': None, 'floating_ip_source': 'neutron', 'image_api_use_tasks': False, 'image_format': 'qcow2', 'message': '', 'network_api_version': '2', 'object_store_api_version': '1', 'secgroup_source': 'neutron', 'status': 'active'}
312cloud cfg: {'api_timeout': None, 'verify': True, 'cacert': None, 'cert': None, 'key': None, 'baremetal_status_code_retries': '5', 'baremetal_introspection_status_code_retries': '5', 'image_status_code_retries': '5', 'disable_vendor_agent': {}, 'interface': 'public', 'floating_ip_source': 'neutron', 'image_api_use_tasks': False, 'image_format': 'qcow2', 'message': '', 'network_api_version': '2', 'object_store_api_version': '1', 'secgroup_source': 'neutron', 'status': 'active', 'auth': {'user_domain_name': 'Default', 'project_domain_name': 'Default', 'project_name': 'admin'}, 'verbose_level': 3, 'deferred_help': False, 'debug': True, 'region_name': 'regionOne', 'default_domain': 'default', 'timing': False, 'inspector_api_version': '1', 'auth_url': 'https://overcloud.redhat.local:13000', 'username': 'admin', 'password': '***', 'beta_command': False, 'compute_api_version': '2.latest', 'identity_api_version': '3', 'image_api_version': '2', 'volume_api_version': '3', 'queues_api_version': '2', 'database_api_version': '1', 'tripleoclient_api_version': '1', 'data_processing_api_version': '1.1', 'loadbalancer_api_version': '2.0', 'workflow_api_version': '2', 'container_infra_api_version': '1', 'baremetal_api_version': '1.58', 'orchestration_api_version': '1', 'dns_api_version': '2', 'key_manager_api_version': '1', 'event_api_version': '2', 'metrics_api_version': '1', 'alarming_api_version': '2', 'auth_type': 'password', 'networks': []}
313compute API version 2.latest, cmd group openstack.compute.v2
314identity API version 3, cmd group openstack.identity.v3
315image API version 2, cmd group openstack.image.v2
316network API version 2, cmd group openstack.network.v2
317object_store API version 1, cmd group openstack.object_store.v1
318volume API version 3, cmd group openstack.volume.v3
319messaging API version 2, cmd group openstack.messaging.v2
320database API version 1, cmd group openstack.database.v1
321tripleoclient API version 1, cmd group openstack.tripleoclient.v1
322data_processing API version 1.1, cmd group openstack.data_processing.v1
323load_balancer API version 2.0, cmd group openstack.load_balancer.v2
324neutronclient API version 2, cmd group openstack.neutronclient.v2
325workflow_engine API version 2, cmd group openstack.workflow_engine.v2
326container_infra API version 1, cmd group openstack.container_infra.v1
327baremetal API version 1.58, cmd group openstack.baremetal.v1
328baremetal_introspection API version 1, cmd group openstack.baremetal_introspection.v1
329orchestration API version 1, cmd group openstack.orchestration.v1
330dns API version 2, cmd group openstack.dns.v2
331key_manager API version 1, cmd group openstack.key_manager.v1
332event API version 2, cmd group openstack.event.v2
333metric API version 1, cmd group openstack.metric.v1
334alarming API version 2, cmd group openstack.alarming.v2
335Auth plugin password selected
336auth_config_hook(): {'api_timeout': None, 'verify': True, 'cacert': None, 'cert': None, 'key': None, 'baremetal_status_code_retries': '5', 'baremetal_introspection_status_code_retries': '5', 'image_status_code_retries': '5', 'disable_vendor_agent': {}, 'interface': 'public', 'floating_ip_source': 'neutron', 'image_api_use_tasks': False, 'image_format': 'qcow2', 'message': '', 'network_api_version': '2', 'object_store_api_version': '1', 'secgroup_source': 'neutron', 'status': 'active', 'auth': {'user_domain_name': 'Default', 'project_domain_name': 'Default', 'project_name': 'admin'}, 'verbose_level': 3, 'deferred_help': False, 'debug': True, 'region_name': 'regionOne', 'default_domain': 'default', 'timing': False, 'inspector_api_version': '1', 'auth_url': 'https://overcloud.redhat.local:13000', 'username': 'admin', 'password': '***', 'beta_command': False, 'compute_api_version': '2.latest', 'identity_api_version': '3', 'image_api_version': '2', 'volume_api_version': '3', 'queues_api_version': '2', 'database_api_version': '1', 'tripleoclient_api_version': '1', 'data_processing_api_version': '1.1', 'loadbalancer_api_version': '2.0', 'workflow_api_version': '2', 'container_infra_api_version': '1', 'baremetal_api_version': '1.58', 'orchestration_api_version': '1', 'dns_api_version': '2', 'key_manager_api_version': '1', 'event_api_version': '2', 'metrics_api_version': '1', 'alarming_api_version': '2', 'auth_type': 'password', 'networks': []}
337command: loadbalancer create -> octaviaclient.osc.v2.load_balancer.CreateLoadBalancer (auth=True)
338Auth plugin password selected
339auth_config_hook(): {'api_timeout': None, 'verify': True, 'cacert': None, 'cert': None, 'key': None, 'baremetal_status_code_retries': '5', 'baremetal_introspection_status_code_retries': '5', 'image_status_code_retries': '5', 'disable_vendor_agent': {}, 'interface': 'public', 'floating_ip_source': 'neutron', 'image_api_use_tasks': False, 'image_format': 'qcow2', 'message': '', 'network_api_version': '2', 'object_store_api_version': '1', 'secgroup_source': 'neutron', 'status': 'active', 'auth': {'user_domain_name': 'Default', 'project_domain_name': 'Default', 'project_name': 'admin'}, 'additional_user_agent': [('osc-lib', '1.14.1')], 'verbose_level': 3, 'deferred_help': False, 'debug': True, 'region_name': 'regionOne', 'default_domain': 'default', 'timing': False, 'inspector_api_version': '1', 'auth_url': 'https://overcloud.redhat.local:13000', 'username': 'admin', 'password': '***', 'beta_command': False, 'compute_api_version': '2.latest', 'identity_api_version': '3', 'image_api_version': '2', 'volume_api_version': '3', 'queues_api_version': '2', 'database_api_version': '1', 'tripleoclient_api_version': '1', 'data_processing_api_version': '1.1', 'loadbalancer_api_version': '2.0', 'workflow_api_version': '2', 'container_infra_api_version': '1', 'baremetal_api_version': '1.58', 'orchestration_api_version': '1', 'dns_api_version': '2', 'key_manager_api_version': '1', 'event_api_version': '2', 'metrics_api_version': '1', 'alarming_api_version': '2', 'auth_type': 'password', 'networks': []}
340Using auth plugin: password
341Using parameters {'auth_url': 'https://overcloud.redhat.local:13000', 'project_name': 'admin', 'project_domain_name': 'Default', 'username': 'admin', 'user_domain_name': 'Default', 'password': '***'}
342Get auth_ref
343REQ: curl -g -i -X GET https://overcloud.redhat.local:13000 -H "Accept: application/json" -H "User-Agent: openstacksdk/0.36.0 keystoneauth1/3.17.1 python-requests/2.20.0 CPython/3.6.8"
344Starting new HTTPS connection (1): overcloud.redhat.local:13000
345https://overcloud.redhat.local:13000 "GET / HTTP/1.1" 300 279
346RESP: [300] Content-Length: 279 Content-Type: application/json Date: Tue, 10 Mar 2020 15:48:02 GMT Location: https://overcloud.redhat.local:13000/v3/ Server: Apache Vary: X-Auth-Token x-openstack-request-id: req-65d8363a-c4f2-42eb-b742-65ff4cc6da1d
347RESP BODY: {"versions": {"values": [{"id": "v3.13", "status": "stable", "updated": "2019-07-19T00:00:00Z", "links": [{"rel": "self", "href": "https://overcloud.redhat.local:13000/v3/"}], "media-types": [{"base": "application/json", "type": "application/vnd.openstack.identity-v3+json"}]}]}}
348GET call to https://overcloud.redhat.local:13000/ used request id req-65d8363a-c4f2-42eb-b742-65ff4cc6da1d
349Making authentication request to https://overcloud.redhat.local:13000/v3/auth/tokens
350https://overcloud.redhat.local:13000 "POST /v3/auth/tokens HTTP/1.1" 201 7898
351{"token": {"methods": ["password"], "user": {"domain": {"id": "default", "name": "Default"}, "id": "1cda561adebf493b9ba3f3686de23522", "name": "admin", "password_expires_at": null}, "audit_ids": ["utUFbIN_QpOkgQgQUyC0hw"], "expires_at": "2020-03-10T16:48:02.000000Z", "issued_at": "2020-03-10T15:48:02.000000Z", "project": {"domain": {"id": "default", "name": "Default"}, "id": "34730f4be8c54c569b0059786c6e100a", "name": "admin"}, "is_domain": false, "roles": [{"id": "35108f06c3814bedadd524f98961c402", "name": "reader"}, {"id": "e3988e47d7bd499b85adc897575b93a0", "name": "admin"}, {"id": "ec2ea884e3a3413199b04aacdaf1bf4e", "name": "_member_"}, {"id": "50c378d402db40e187e2558f75cc5ea5", "name": "member"}], "catalog": [{"endpoints": [{"id": "3922473da40c4cc28d580caf892cefe5", "interface": "admin", "region_id": "regionOne", "url": "https://overcloud.internalapi.redhat.local:9292", "region": "regionOne"}, {"id": "3d79504528ca4cdea4d9def8128584ae", "interface": "internal", "region_id": "regionOne", "url": "https://overcloud.internalapi.redhat.local:9292", "region": "regionOne"}, {"id": "aa4be6ca56014583b573f19d7bfa3414", "interface": "public", "region_id": "regionOne", "url": "https://overcloud.redhat.local:13292", "region": "regionOne"}], "id": "01c96d92cb0a4cf99caf14894a4b6fe2", "type": "image", "name": "glance"}, {"endpoints": [{"id": "3ea1e66c5c38499e8a120383c83d5c82", "interface": "internal", "region_id": "regionOne", "url": "https://overcloud.internalapi.redhat.local:8774/v2.1", "region": "regionOne"}, {"id": "70dd2ae2b77b4bb58d58b68fdf22d097", "interface": "public", "region_id": "regionOne", "url": "https://overcloud.redhat.local:13774/v2.1", "region": "regionOne"}, {"id": "75f53cd0dd094341a477d2351dd3a68b", "interface": "admin", "region_id": "regionOne", "url": "https://overcloud.internalapi.redhat.local:8774/v2.1", "region": "regionOne"}], "id": "23d7862648774dfaa659f03c891e0d32", "type": "compute", "name": "nova"}, {"endpoints": [{"id": 
"68b7dfc435674139b7a69a7e82e0f411", "interface": "admin", "region_id": "regionOne", "url": "https://overcloud.ctlplane.redhat.local:35357", "region": "regionOne"}, {"id": "98c96768cf2a49279d41fb4f8fa2b25a", "interface": "public", "region_id": "regionOne", "url": "https://overcloud.redhat.local:13000", "region": "regionOne"}, {"id": "d745eccd33fb42b188d32e91e3bdf608", "interface": "internal", "region_id": "regionOne", "url": "https://overcloud.internalapi.redhat.local:5000", "region": "regionOne"}], "id": "34efa591adb741d7b3d96f96599ab0ce", "type": "identity", "name": "keystone"}, {"endpoints": [{"id": "5e395d93f03240e99d0b4f077ed1c275", "interface": "internal", "region_id": "regionOne", "url": "https://overcloud.internalapi.redhat.local:8778/placement", "region": "regionOne"}, {"id": "cd48a84fc4c64abba6c658e582f71471", "interface": "public", "region_id": "regionOne", "url": "https://overcloud.redhat.local:13778/placement", "region": "regionOne"}, {"id": "f5d9ae88e0fe4737816850954f5ee07e", "interface": "admin", "region_id": "regionOne", "url": "https://overcloud.internalapi.redhat.local:8778/placement", "region": "regionOne"}], "id": "3602c7ee3242410f9b501a6a81202be0", "type": "placement", "name": "placement"}, {"endpoints": [{"id": "024aeb5a469643a2a1742cac264fc62e", "interface": "internal", "region_id": "regionOne", "url": "https://overcloud.internalapi.redhat.local:8004/v1/34730f4be8c54c569b0059786c6e100a", "region": "regionOne"}, {"id": "97b5dd898e8d4652a9683fceb1e17417", "interface": "admin", "region_id": "regionOne", "url": "https://overcloud.internalapi.redhat.local:8004/v1/34730f4be8c54c569b0059786c6e100a", "region": "regionOne"}, {"id": "f1cb7a31ee294117b324a8c6f165f9bd", "interface": "public", "region_id": "regionOne", "url": "https://overcloud.redhat.local:13004/v1/34730f4be8c54c569b0059786c6e100a", "region": "regionOne"}], "id": "658a3002a8834bdf8f8a17641a5e64fa", "type": "orchestration", "name": "heat"}, {"endpoints": [{"id": 
"1e0f347e07954e1daa8cc1d9a1995c32", "interface": "internal", "region_id": "regionOne", "url": "https://overcloud.internalapi.redhat.local:8776/v3/34730f4be8c54c569b0059786c6e100a", "region": "regionOne"}, {"id": "2a7ef36db26745648a745d335ed0c7da", "interface": "admin", "region_id": "regionOne", "url": "https://overcloud.internalapi.redhat.local:8776/v3/34730f4be8c54c569b0059786c6e100a", "region": "regionOne"}, {"id": "ea467ac38a2f4274aed9f885a60f8f22", "interface": "public", "region_id": "regionOne", "url": "https://overcloud.redhat.local:13776/v3/34730f4be8c54c569b0059786c6e100a", "region": "regionOne"}], "id": "746d540f633f47fdadb42a47a006a53d", "type": "volumev3", "name": "cinderv3"}, {"endpoints": [{"id": "9f766ff00dd946d1b0733f71a14e20fa", "interface": "admin", "region_id": "regionOne", "url": "https://overcloud.internalapi.redhat.local:9876", "region": "regionOne"}, {"id": "ab844da69cd440068e4d22e43328549a", "interface": "internal", "region_id": "regionOne", "url": "https://overcloud.internalapi.redhat.local:9876", "region": "regionOne"}, {"id": "fc8e322af23040dd84fc49623b767555", "interface": "public", "region_id": "regionOne", "url": "https://overcloud.redhat.local:13876", "region": "regionOne"}], "id": "89873f52593f4febb67a8f4777e1b8fe", "type": "load-balancer", "name": "octavia"}, {"endpoints": [{"id": "4af6105f51fe49be9977733043bcc663", "interface": "public", "region_id": "regionOne", "url": "https://overcloud.redhat.local:13808/v1/AUTH_34730f4be8c54c569b0059786c6e100a", "region": "regionOne"}, {"id": "c6f2836eb8474000ba519374e08c0718", "interface": "internal", "region_id": "regionOne", "url": "https://overcloud.storage.redhat.local:8080/v1/AUTH_34730f4be8c54c569b0059786c6e100a", "region": "regionOne"}, {"id": "dfe95e784b10461eb54e5a559d9d8519", "interface": "admin", "region_id": "regionOne", "url": "https://overcloud.storage.redhat.local:8080", "region": "regionOne"}], "id": "8c42aaaaeea240329068d2747a241c7a", "type": "object-store", "name": "swift"}, 
{"endpoints": [{"id": "42b7c7d88a2e43e38daec13674d2d3e4", "interface": "admin", "region_id": "regionOne", "url": "https://overcloud.internalapi.redhat.local:8000/v1", "region": "regionOne"}, {"id": "c483eb4c24d44163944f961048d15709", "interface": "public", "region_id": "regionOne", "url": "https://overcloud.redhat.local:13005/v1", "region": "regionOne"}, {"id": "dbd00c1c249b4920be9ac149f7f5f700", "interface": "internal", "region_id": "regionOne", "url": "https://overcloud.internalapi.redhat.local:8000/v1", "region": "regionOne"}], "id": "9fe809aa90af4b4090f5b6487e837369", "type": "cloudformation", "name": "heat-cfn"}, {"endpoints": [{"id": "052bca56ad9d46eeaa8992d9a11381de", "interface": "admin", "region_id": "regionOne", "url": "https://overcloud.internalapi.redhat.local:9696", "region": "regionOne"}, {"id": "d16ca77062fa4884ab35e20b1e5a85b6", "interface": "internal", "region_id": "regionOne", "url": "https://overcloud.internalapi.redhat.local:9696", "region": "regionOne"}, {"id": "fafe23c9370c471dbaaca291d9c80a34", "interface": "public", "region_id": "regionOne", "url": "https://overcloud.redhat.local:13696", "region": "regionOne"}], "id": "a0626844d0aa434d8212e92977f51dc8", "type": "network", "name": "neutron"}, {"endpoints": [{"id": "51fe869d894b4d12b0e6547a360b3a15", "interface": "internal", "region_id": "regionOne", "url": "https://overcloud.internalapi.redhat.local:8776/v2/34730f4be8c54c569b0059786c6e100a", "region": "regionOne"}, {"id": "da99aa211700472fae4404709d89574c", "interface": "public", "region_id": "regionOne", "url": "https://overcloud.redhat.local:13776/v2/34730f4be8c54c569b0059786c6e100a", "region": "regionOne"}, {"id": "f0722b8a002d4a84825aacf180949c51", "interface": "admin", "region_id": "regionOne", "url": "https://overcloud.internalapi.redhat.local:8776/v2/34730f4be8c54c569b0059786c6e100a", "region": "regionOne"}], "id": "e565d5609fe349479e748c62d532b882", "type": "volumev2", "name": "cinderv2"}]}}
352run(Namespace(columns=[], description=None, disable=None, enable=True, fit_width=False, flavor=None, formatter='table', max_width=0, name='lb1', noindent=False, prefix='', print_empty=False, project=None, provider=None, variables=[], vip_address=None, vip_network_id=None, vip_port_id=None, vip_qos_policy_id=None, vip_subnet_id='private-1-subnet-1'))
353Instantiating identity client: <class 'keystoneclient.v3.client.Client'>
354Instantiating neutron client: <class 'neutronclient.v2_0.client.Client'>
355Making authentication request to https://overcloud.redhat.local:13000/v3/auth/tokens
356https://overcloud.redhat.local:13000 "POST /v3/auth/tokens HTTP/1.1" 201 7898
357{"token": {"methods": ["password"], "user": {"domain": {"id": "default", "name": "Default"}, "id": "1cda561adebf493b9ba3f3686de23522", "name": "admin", "password_expires_at": null}, "audit_ids": ["1Vy66IUhT66cMqNsQKc0Jw"], "expires_at": "2020-03-10T16:48:03.000000Z", "issued_at": "2020-03-10T15:48:03.000000Z", "project": {"domain": {"id": "default", "name": "Default"}, "id": "34730f4be8c54c569b0059786c6e100a", "name": "admin"}, "is_domain": false, "roles": [{"id": "50c378d402db40e187e2558f75cc5ea5", "name": "member"}, {"id": "35108f06c3814bedadd524f98961c402", "name": "reader"}, {"id": "ec2ea884e3a3413199b04aacdaf1bf4e", "name": "_member_"}, {"id": "e3988e47d7bd499b85adc897575b93a0", "name": "admin"}], "catalog": [{"endpoints": [{"id": "3922473da40c4cc28d580caf892cefe5", "interface": "admin", "region_id": "regionOne", "url": "https://overcloud.internalapi.redhat.local:9292", "region": "regionOne"}, {"id": "3d79504528ca4cdea4d9def8128584ae", "interface": "internal", "region_id": "regionOne", "url": "https://overcloud.internalapi.redhat.local:9292", "region": "regionOne"}, {"id": "aa4be6ca56014583b573f19d7bfa3414", "interface": "public", "region_id": "regionOne", "url": "https://overcloud.redhat.local:13292", "region": "regionOne"}], "id": "01c96d92cb0a4cf99caf14894a4b6fe2", "type": "image", "name": "glance"}, {"endpoints": [{"id": "3ea1e66c5c38499e8a120383c83d5c82", "interface": "internal", "region_id": "regionOne", "url": "https://overcloud.internalapi.redhat.local:8774/v2.1", "region": "regionOne"}, {"id": "70dd2ae2b77b4bb58d58b68fdf22d097", "interface": "public", "region_id": "regionOne", "url": "https://overcloud.redhat.local:13774/v2.1", "region": "regionOne"}, {"id": "75f53cd0dd094341a477d2351dd3a68b", "interface": "admin", "region_id": "regionOne", "url": "https://overcloud.internalapi.redhat.local:8774/v2.1", "region": "regionOne"}], "id": "23d7862648774dfaa659f03c891e0d32", "type": "compute", "name": "nova"}, {"endpoints": [{"id": 
"68b7dfc435674139b7a69a7e82e0f411", "interface": "admin", "region_id": "regionOne", "url": "https://overcloud.ctlplane.redhat.local:35357", "region": "regionOne"}, {"id": "98c96768cf2a49279d41fb4f8fa2b25a", "interface": "public", "region_id": "regionOne", "url": "https://overcloud.redhat.local:13000", "region": "regionOne"}, {"id": "d745eccd33fb42b188d32e91e3bdf608", "interface": "internal", "region_id": "regionOne", "url": "https://overcloud.internalapi.redhat.local:5000", "region": "regionOne"}], "id": "34efa591adb741d7b3d96f96599ab0ce", "type": "identity", "name": "keystone"}, {"endpoints": [{"id": "5e395d93f03240e99d0b4f077ed1c275", "interface": "internal", "region_id": "regionOne", "url": "https://overcloud.internalapi.redhat.local:8778/placement", "region": "regionOne"}, {"id": "cd48a84fc4c64abba6c658e582f71471", "interface": "public", "region_id": "regionOne", "url": "https://overcloud.redhat.local:13778/placement", "region": "regionOne"}, {"id": "f5d9ae88e0fe4737816850954f5ee07e", "interface": "admin", "region_id": "regionOne", "url": "https://overcloud.internalapi.redhat.local:8778/placement", "region": "regionOne"}], "id": "3602c7ee3242410f9b501a6a81202be0", "type": "placement", "name": "placement"}, {"endpoints": [{"id": "024aeb5a469643a2a1742cac264fc62e", "interface": "internal", "region_id": "regionOne", "url": "https://overcloud.internalapi.redhat.local:8004/v1/34730f4be8c54c569b0059786c6e100a", "region": "regionOne"}, {"id": "97b5dd898e8d4652a9683fceb1e17417", "interface": "admin", "region_id": "regionOne", "url": "https://overcloud.internalapi.redhat.local:8004/v1/34730f4be8c54c569b0059786c6e100a", "region": "regionOne"}, {"id": "f1cb7a31ee294117b324a8c6f165f9bd", "interface": "public", "region_id": "regionOne", "url": "https://overcloud.redhat.local:13004/v1/34730f4be8c54c569b0059786c6e100a", "region": "regionOne"}], "id": "658a3002a8834bdf8f8a17641a5e64fa", "type": "orchestration", "name": "heat"}, {"endpoints": [{"id": 
"1e0f347e07954e1daa8cc1d9a1995c32", "interface": "internal", "region_id": "regionOne", "url": "https://overcloud.internalapi.redhat.local:8776/v3/34730f4be8c54c569b0059786c6e100a", "region": "regionOne"}, {"id": "2a7ef36db26745648a745d335ed0c7da", "interface": "admin", "region_id": "regionOne", "url": "https://overcloud.internalapi.redhat.local:8776/v3/34730f4be8c54c569b0059786c6e100a", "region": "regionOne"}, {"id": "ea467ac38a2f4274aed9f885a60f8f22", "interface": "public", "region_id": "regionOne", "url": "https://overcloud.redhat.local:13776/v3/34730f4be8c54c569b0059786c6e100a", "region": "regionOne"}], "id": "746d540f633f47fdadb42a47a006a53d", "type": "volumev3", "name": "cinderv3"}, {"endpoints": [{"id": "9f766ff00dd946d1b0733f71a14e20fa", "interface": "admin", "region_id": "regionOne", "url": "https://overcloud.internalapi.redhat.local:9876", "region": "regionOne"}, {"id": "ab844da69cd440068e4d22e43328549a", "interface": "internal", "region_id": "regionOne", "url": "https://overcloud.internalapi.redhat.local:9876", "region": "regionOne"}, {"id": "fc8e322af23040dd84fc49623b767555", "interface": "public", "region_id": "regionOne", "url": "https://overcloud.redhat.local:13876", "region": "regionOne"}], "id": "89873f52593f4febb67a8f4777e1b8fe", "type": "load-balancer", "name": "octavia"}, {"endpoints": [{"id": "4af6105f51fe49be9977733043bcc663", "interface": "public", "region_id": "regionOne", "url": "https://overcloud.redhat.local:13808/v1/AUTH_34730f4be8c54c569b0059786c6e100a", "region": "regionOne"}, {"id": "c6f2836eb8474000ba519374e08c0718", "interface": "internal", "region_id": "regionOne", "url": "https://overcloud.storage.redhat.local:8080/v1/AUTH_34730f4be8c54c569b0059786c6e100a", "region": "regionOne"}, {"id": "dfe95e784b10461eb54e5a559d9d8519", "interface": "admin", "region_id": "regionOne", "url": "https://overcloud.storage.redhat.local:8080", "region": "regionOne"}], "id": "8c42aaaaeea240329068d2747a241c7a", "type": "object-store", "name": "swift"}, 
{"endpoints": [{"id": "42b7c7d88a2e43e38daec13674d2d3e4", "interface": "admin", "region_id": "regionOne", "url": "https://overcloud.internalapi.redhat.local:8000/v1", "region": "regionOne"}, {"id": "c483eb4c24d44163944f961048d15709", "interface": "public", "region_id": "regionOne", "url": "https://overcloud.redhat.local:13005/v1", "region": "regionOne"}, {"id": "dbd00c1c249b4920be9ac149f7f5f700", "interface": "internal", "region_id": "regionOne", "url": "https://overcloud.internalapi.redhat.local:8000/v1", "region": "regionOne"}], "id": "9fe809aa90af4b4090f5b6487e837369", "type": "cloudformation", "name": "heat-cfn"}, {"endpoints": [{"id": "052bca56ad9d46eeaa8992d9a11381de", "interface": "admin", "region_id": "regionOne", "url": "https://overcloud.internalapi.redhat.local:9696", "region": "regionOne"}, {"id": "d16ca77062fa4884ab35e20b1e5a85b6", "interface": "internal", "region_id": "regionOne", "url": "https://overcloud.internalapi.redhat.local:9696", "region": "regionOne"}, {"id": "fafe23c9370c471dbaaca291d9c80a34", "interface": "public", "region_id": "regionOne", "url": "https://overcloud.redhat.local:13696", "region": "regionOne"}], "id": "a0626844d0aa434d8212e92977f51dc8", "type": "network", "name": "neutron"}, {"endpoints": [{"id": "51fe869d894b4d12b0e6547a360b3a15", "interface": "internal", "region_id": "regionOne", "url": "https://overcloud.internalapi.redhat.local:8776/v2/34730f4be8c54c569b0059786c6e100a", "region": "regionOne"}, {"id": "da99aa211700472fae4404709d89574c", "interface": "public", "region_id": "regionOne", "url": "https://overcloud.redhat.local:13776/v2/34730f4be8c54c569b0059786c6e100a", "region": "regionOne"}, {"id": "f0722b8a002d4a84825aacf180949c51", "interface": "admin", "region_id": "regionOne", "url": "https://overcloud.internalapi.redhat.local:8776/v2/34730f4be8c54c569b0059786c6e100a", "region": "regionOne"}], "id": "e565d5609fe349479e748c62d532b882", "type": "volumev2", "name": "cinderv2"}]}}
358REQ: curl -g -i -X GET https://overcloud.redhat.local:13696/v2.0/subnets -H "Accept: application/json" -H "User-Agent: python-neutronclient" -H "X-Auth-Token: {SHA256}9900ae57cf1e909b51f08557dbfc139db365b2df1ff920e1cb6b2ba5f279a6b3"
359Starting new HTTPS connection (1): overcloud.redhat.local:13696
360https://overcloud.redhat.local:13696 "GET /v2.0/subnets HTTP/1.1" 200 585
361RESP: [200] Content-Encoding: gzip Content-Length: 585 Content-Type: application/json Date: Tue, 10 Mar 2020 15:48:03 GMT Server: Apache Vary: Accept-Encoding Via: 1.1 controller-3.internalapi.redhat.local:13696 X-Openstack-Request-Id: req-976edbb6-b171-4744-9f2b-7057c230e2d6
362RESP BODY: {"subnets":[{"id":"56b9356d-d4fd-4044-989c-e50bff08ad99","name":"lb-mgmt-subnet","tenant_id":"34730f4be8c54c569b0059786c6e100a","network_id":"01d4d6c6-a1f4-4983-aa8b-b0e24ccbdfd8","ip_version":4,"subnetpool_id":null,"enable_dhcp":true,"ipv6_ra_mode":null,"ipv6_address_mode":null,"gateway_ip":"172.24.0.1","cidr":"172.24.0.0/16","allocation_pools":[{"start":"172.24.0.2","end":"172.24.255.254"}],"host_routes":[],"dns_nameservers":[],"description":"","service_types":[],"segment_id":null,"tags":[],"created_at":"2020-03-10T09:30:04Z","updated_at":"2020-03-10T09:30:04Z","revision_number":0,"project_id":"34730f4be8c54c569b0059786c6e100a"},{"id":"8ff75637-9476-4b04-87e3-e4b6381dd66c","name":"floating-200-subnet","tenant_id":"34730f4be8c54c569b0059786c6e100a","network_id":"56be27f2-4e30-4544-ba68-1ddf9713093f","ip_version":4,"subnetpool_id":null,"enable_dhcp":true,"ipv6_ra_mode":null,"ipv6_address_mode":null,"gateway_ip":"172.16.200.250","cidr":"172.16.200.0/24","allocation_pools":[{"start":"172.16.200.151","end":"172.16.200.179"}],"host_routes":[],"dns_nameservers":[],"description":"","service_types":[],"segment_id":null,"tags":[],"created_at":"2020-03-10T10:14:53Z","updated_at":"2020-03-10T10:14:53Z","revision_number":0,"project_id":"34730f4be8c54c569b0059786c6e100a"},{"id":"aa3d1ad1-f75c-4de2-adcc-b7971e5ca8ed","name":"private-1-subnet-1","tenant_id":"34730f4be8c54c569b0059786c6e100a","network_id":"d2263e5c-9f6f-4356-81a7-ba292500abc7","ip_version":4,"subnetpool_id":null,"enable_dhcp":true,"ipv6_ra_mode":null,"ipv6_address_mode":null,"gateway_ip":"192.168.9.1","cidr":"192.168.9.0/24","allocation_pools":[{"start":"192.168.9.2","end":"192.168.9.254"}],"host_routes":[],"dns_nameservers":[],"description":"","service_types":[],"segment_id":null,"tags":[],"created_at":"2020-03-10T10:14:23Z","updated_at":"2020-03-10T10:14:23Z","revision_number":0,"project_id":"34730f4be8c54c569b0059786c6e100a"}]}
363GET call to network for https://overcloud.redhat.local:13696/v2.0/subnets used request id req-976edbb6-b171-4744-9f2b-7057c230e2d6
364REQ: curl -g -i -X POST https://overcloud.redhat.local:13876/v2.0/lbaas/loadbalancers -H "Accept: application/json" -H "Content-Type: application/json" -H "User-Agent: openstacksdk/0.36.0 keystoneauth1/3.17.1 python-requests/2.20.0 CPython/3.6.8" -H "X-Auth-Token: {SHA256}9900ae57cf1e909b51f08557dbfc139db365b2df1ff920e1cb6b2ba5f279a6b3" -d '{"loadbalancer": {"name": "lb1", "vip_subnet_id": "aa3d1ad1-f75c-4de2-adcc-b7971e5ca8ed", "admin_state_up": true}}'
365Starting new HTTPS connection (1): overcloud.redhat.local:13876
366https://overcloud.redhat.local:13876 "POST /v2.0/lbaas/loadbalancers HTTP/1.1" 503 None
367RESP: [503] Cache-Control: no-cache Connection: close Content-Type: text/html
368RESP BODY: Omitted, Content-Type is set to text/html. Only application/json responses have their bodies logged.
369Request returned failure status: 503
370Expecting value: line 1 column 1 (char 0)
371Traceback (most recent call last):
372 File "/usr/lib/python3.6/site-packages/octaviaclient/api/v2/octavia.py", line 31, in wrapper
373 response = func(*args, **kwargs)
374 File "/usr/lib/python3.6/site-packages/octaviaclient/api/v2/octavia.py", line 113, in load_balancer_create
375 response = self._create(url, **params)
376 File "/usr/lib/python3.6/site-packages/osc_lib/api/api.py", line 164, in create
377 ret = self._request(method, url, session=session, **params)
378 File "/usr/lib/python3.6/site-packages/osc_lib/api/api.py", line 141, in _request
379 return session.request(url, method, **kwargs)
380 File "/usr/lib/python3.6/site-packages/keystoneauth1/session.py", line 943, in request
381 raise exceptions.from_response(resp, method, url)
382keystoneauth1.exceptions.http.ServiceUnavailable: Service Unavailable (HTTP 503)
383
384During handling of the above exception, another exception occurred:
385
386Traceback (most recent call last):
387 File "/usr/lib/python3.6/site-packages/cliff/app.py", line 401, in run_subcommand
388 result = cmd.run(parsed_args)
389 File "/usr/lib/python3.6/site-packages/osc_lib/command/command.py", line 41, in run
390 return super(Command, self).run(parsed_args)
391 File "/usr/lib/python3.6/site-packages/cliff/display.py", line 116, in run
392 column_names, data = self.take_action(parsed_args)
393 File "/usr/lib/python3.6/site-packages/octaviaclient/osc/v2/load_balancer.py", line 132, in take_action
394 json=body)
395 File "/usr/lib/python3.6/site-packages/octaviaclient/api/v2/octavia.py", line 35, in wrapper
396 message = e.response.json().get(
397 File "/usr/lib/python3.6/site-packages/requests/models.py", line 897, in json
398 return complexjson.loads(self.text, **kwargs)
399 File "/usr/lib64/python3.6/site-packages/simplejson/__init__.py", line 518, in loads
400 return _default_decoder.decode(s)
401 File "/usr/lib64/python3.6/site-packages/simplejson/decoder.py", line 370, in decode
402 obj, end = self.raw_decode(s)
403 File "/usr/lib64/python3.6/site-packages/simplejson/decoder.py", line 400, in raw_decode
404 return self.scan_once(s, idx=_w(s, idx).end())
405simplejson.errors.JSONDecodeError: Expecting value: line 1 column 1 (char 0)
406clean_up CreateLoadBalancer: Expecting value: line 1 column 1 (char 0)
407Traceback (most recent call last):
408 File "/usr/lib/python3.6/site-packages/octaviaclient/api/v2/octavia.py", line 31, in wrapper
409 response = func(*args, **kwargs)
410 File "/usr/lib/python3.6/site-packages/octaviaclient/api/v2/octavia.py", line 113, in load_balancer_create
411 response = self._create(url, **params)
412 File "/usr/lib/python3.6/site-packages/osc_lib/api/api.py", line 164, in create
413 ret = self._request(method, url, session=session, **params)
414 File "/usr/lib/python3.6/site-packages/osc_lib/api/api.py", line 141, in _request
415 return session.request(url, method, **kwargs)
416 File "/usr/lib/python3.6/site-packages/keystoneauth1/session.py", line 943, in request
417 raise exceptions.from_response(resp, method, url)
418keystoneauth1.exceptions.http.ServiceUnavailable: Service Unavailable (HTTP 503)
419
420During handling of the above exception, another exception occurred:
421
422Traceback (most recent call last):
423 File "/usr/lib/python3.6/site-packages/osc_lib/shell.py", line 136, in run
424 ret_val = super(OpenStackShell, self).run(argv)
425 File "/usr/lib/python3.6/site-packages/cliff/app.py", line 281, in run
426 result = self.run_subcommand(remainder)
427 File "/usr/lib/python3.6/site-packages/osc_lib/shell.py", line 176, in run_subcommand
428 ret_value = super(OpenStackShell, self).run_subcommand(argv)
429 File "/usr/lib/python3.6/site-packages/cliff/app.py", line 401, in run_subcommand
430 result = cmd.run(parsed_args)
431 File "/usr/lib/python3.6/site-packages/osc_lib/command/command.py", line 41, in run
432 return super(Command, self).run(parsed_args)
433 File "/usr/lib/python3.6/site-packages/cliff/display.py", line 116, in run
434 column_names, data = self.take_action(parsed_args)
435 File "/usr/lib/python3.6/site-packages/octaviaclient/osc/v2/load_balancer.py", line 132, in take_action
436 json=body)
437 File "/usr/lib/python3.6/site-packages/octaviaclient/api/v2/octavia.py", line 35, in wrapper
438 message = e.response.json().get(
439 File "/usr/lib/python3.6/site-packages/requests/models.py", line 897, in json
440 return complexjson.loads(self.text, **kwargs)
441 File "/usr/lib64/python3.6/site-packages/simplejson/__init__.py", line 518, in loads
442 return _default_decoder.decode(s)
443 File "/usr/lib64/python3.6/site-packages/simplejson/decoder.py", line 370, in decode
444 obj, end = self.raw_decode(s)
445 File "/usr/lib64/python3.6/site-packages/simplejson/decoder.py", line 400, in raw_decode
446 return self.scan_once(s, idx=_w(s, idx).end())
447simplejson.errors.JSONDecodeError: Expecting value: line 1 column 1 (char 0)
448
449END return value: 1
450
451in keystone.log:
452
4532020-03-10 09:24:06.122 28 WARNING keystone.server.flask.application [req-292489a1-523d-4d1f-b52c-964c4ba224fc - - - - -] Could not find user: octavia.: keystone.exception.UserNotFound: Could not find user: octavia.