Coverage for manila/share/drivers/huawei/v3/replication.py: 99%

126 statements  

« prev     ^ index     » next       coverage.py v7.11.0, created at 2026-02-18 22:19 +0000

1# Copyright (c) 2016 Huawei Technologies Co., Ltd. 

2# All Rights Reserved. 

3# 

4# Licensed under the Apache License, Version 2.0 (the "License"); you may 

5# not use this file except in compliance with the License. You may obtain 

6# a copy of the License at 

7# 

8# http://www.apache.org/licenses/LICENSE-2.0 

9# 

10# Unless required by applicable law or agreed to in writing, software 

11# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 

12# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 

13# License for the specific language governing permissions and limitations 

14# under the License. 

15 

16from oslo_log import log 

17from oslo_utils import strutils 

18 

19from manila.common import constants as common_constants 

20from manila import exception 

21from manila.i18n import _ 

22from manila.share.drivers.huawei import constants 

23 

24 

25LOG = log.getLogger(__name__) 

26 

27 

class ReplicaPairManager(object):
    """Manage Huawei array replication pairs that back share replicas.

    All array interaction goes through the REST helper supplied to the
    constructor; this class layers the ordering and error-handling logic
    for creating, monitoring, switching over and deleting pairs.
    """

    def __init__(self, helper):
        # REST helper used for every array-side replication operation.
        self.helper = helper

32 def create(self, local_share_info, remote_device_wwn, remote_fs_id): 

33 local_share_name = local_share_info.get('name') 

34 

35 try: 

36 local_fs_id = self.helper.get_fsid_by_name(local_share_name) 

37 if not local_fs_id: 

38 msg = _("Local fs was not found by name %s.") 

39 LOG.error(msg, local_share_name) 

40 raise exception.ReplicationException( 

41 reason=msg % local_share_name) 

42 

43 remote_device = self.helper.get_remote_device_by_wwn( 

44 remote_device_wwn) 

45 pair_params = { 

46 "LOCALRESID": local_fs_id, 

47 "LOCALRESTYPE": constants.FILE_SYSTEM_TYPE, 

48 "REMOTEDEVICEID": remote_device.get('ID'), 

49 "REMOTEDEVICENAME": remote_device.get('NAME'), 

50 "REMOTERESID": remote_fs_id, 

51 "REPLICATIONMODEL": constants.REPLICA_ASYNC_MODEL, 

52 "RECOVERYPOLICY": '2', 

53 "SYNCHRONIZETYPE": '1', 

54 "SPEED": constants.REPLICA_SPEED_MEDIUM, 

55 } 

56 

57 pair_info = self.helper.create_replication_pair(pair_params) 

58 except Exception: 

59 msg = ("Failed to create replication pair for share %s.") 

60 LOG.exception(msg, local_share_name) 

61 raise 

62 

63 self._sync_replication_pair(pair_info['ID']) 

64 

65 return pair_info['ID'] 

66 

67 def _get_replication_pair_info(self, replica_pair_id): 

68 try: 

69 pair_info = self.helper.get_replication_pair_by_id( 

70 replica_pair_id) 

71 except Exception: 

72 LOG.exception('Failed to get replication pair info for ' 

73 '%s.', replica_pair_id) 

74 raise 

75 

76 return pair_info 

77 

78 def _check_replication_health(self, pair_info): 

79 if (pair_info['HEALTHSTATUS'] != 

80 constants.REPLICA_HEALTH_STATUS_NORMAL): 

81 return common_constants.STATUS_ERROR 

82 

83 def _check_replication_running_status(self, pair_info): 

84 if (pair_info['RUNNINGSTATUS'] in ( 

85 constants.REPLICA_RUNNING_STATUS_SPLITTED, 

86 constants.REPLICA_RUNNING_STATUS_TO_RECOVER)): 

87 return common_constants.REPLICA_STATE_OUT_OF_SYNC 

88 

89 if (pair_info['RUNNINGSTATUS'] in ( 

90 constants.REPLICA_RUNNING_STATUS_INTERRUPTED, 

91 constants.REPLICA_RUNNING_STATUS_INVALID)): 

92 return common_constants.STATUS_ERROR 

93 

94 def _check_replication_secondary_data_status(self, pair_info): 

95 if (pair_info['SECRESDATASTATUS'] in 

96 constants.REPLICA_DATA_STATUS_IN_SYNC): 

97 return common_constants.REPLICA_STATE_IN_SYNC 

98 else: 

99 return common_constants.REPLICA_STATE_OUT_OF_SYNC 

100 

101 def _check_replica_state(self, pair_info): 

102 result = self._check_replication_health(pair_info) 

103 if result is not None: 

104 return result 

105 

106 result = self._check_replication_running_status(pair_info) 

107 if result is not None: 

108 return result 

109 

110 return self._check_replication_secondary_data_status(pair_info) 

111 

112 def get_replica_state(self, replica_pair_id): 

113 try: 

114 pair_info = self._get_replication_pair_info(replica_pair_id) 

115 except Exception: 

116 # if cannot communicate to backend, return error 

117 LOG.error('Cannot get replica state, return %s', 

118 common_constants.STATUS_ERROR) 

119 return common_constants.STATUS_ERROR 

120 

121 return self._check_replica_state(pair_info) 

122 

123 def _sync_replication_pair(self, pair_id): 

124 try: 

125 self.helper.sync_replication_pair(pair_id) 

126 except Exception as err: 

127 LOG.warning('Failed to sync replication pair %(id)s. ' 

128 'Reason: %(err)s', 

129 {'id': pair_id, 'err': err}) 

130 

    def update_replication_pair_state(self, replica_pair_id):
        """Drive a secondary-side pair back toward its expected state.

        Steps, in order; any failure logs and aborts the remaining steps:
        1. Give up (warning) if the pair's health is abnormal.
        2. If the array still marks this side primary — per the inline
           comment, leftover of an incomplete switch over — retry the
           switch over and refresh the pair info.
        3. If the secondary is R/W, re-apply the secondary write lock.
        4. If the running status allows it, trigger a (best-effort) sync.

        :param replica_pair_id: ID of the replication pair to reconcile.
        """
        pair_info = self._get_replication_pair_info(replica_pair_id)

        health = self._check_replication_health(pair_info)
        if health is not None:
            LOG.warning("Cannot update the replication %s "
                        "because it's not in normal status.",
                        replica_pair_id)
            return

        if strutils.bool_from_string(pair_info['ISPRIMARY']):
            # current replica is primary, not consistent with manila.
            # the reason for this circumstance is the last switch over
            # didn't succeed completely. continue the switch over progress..
            try:
                self.helper.switch_replication_pair(replica_pair_id)
            except Exception:
                msg = ('Replication pair %s primary/secondary '
                       'relationship is not right, try to switch over '
                       'again but still failed.')
                LOG.exception(msg, replica_pair_id)
                return

            # refresh the replication pair info
            pair_info = self._get_replication_pair_info(replica_pair_id)

        if pair_info['SECRESACCESS'] == constants.REPLICA_SECONDARY_RW:
            try:
                self.helper.set_pair_secondary_write_lock(replica_pair_id)
            except Exception:
                msg = ('Replication pair %s secondary access is R/W, '
                       'try to set write lock but still failed.')
                LOG.exception(msg, replica_pair_id)
                return

        # Only statuses that support synchronization trigger a sync.
        if pair_info['RUNNINGSTATUS'] in (
                constants.REPLICA_RUNNING_STATUS_NORMAL,
                constants.REPLICA_RUNNING_STATUS_SPLITTED,
                constants.REPLICA_RUNNING_STATUS_TO_RECOVER):
            self._sync_replication_pair(replica_pair_id)

    def switch_over(self, replica_pair_id):
        """Promote the secondary side of the replication pair to primary.

        Refuses to promote (raises) when data is not fully synchronized.
        Failure handling is deliberately asymmetric: a failed split is
        ignored, a failed write-lock cancel is fatal (the secondary would
        stay read-only), and once the lock is cancelled the remaining
        cleanup steps only log on failure because the share is already
        accessible on this side.

        :param replica_pair_id: ID of the replication pair to switch over.
        :raises exception.ReplicationException: if replica data is not in
            sync and therefore cannot be promoted.
        """
        pair_info = self._get_replication_pair_info(replica_pair_id)

        # Nothing to do if the array already reports this side as primary.
        if strutils.bool_from_string(pair_info['ISPRIMARY']):
            LOG.warning('The replica to promote is already primary, '
                        'no need to switch over.')
            return

        replica_state = self._check_replica_state(pair_info)
        if replica_state != common_constants.REPLICA_STATE_IN_SYNC:
            # replica is not in SYNC state, can't be promoted
            msg = _('Data of replica %s is not synchronized, '
                    'can not promote.')
            raise exception.ReplicationException(
                reason=msg % replica_pair_id)

        try:
            self.helper.split_replication_pair(replica_pair_id)
        except Exception:
            # split failed
            # means replication pair is in an abnormal status,
            # ignore this exception, continue to cancel secondary write lock,
            # let secondary share accessible for disaster recovery.
            LOG.exception('Failed to split replication pair %s while '
                          'switching over.', replica_pair_id)

        try:
            self.helper.cancel_pair_secondary_write_lock(replica_pair_id)
        except Exception:
            # Fatal: without cancelling the lock the secondary stays
            # read-only, so the promotion cannot proceed.
            LOG.exception('Failed to cancel replication pair %s '
                          'secondary write lock.', replica_pair_id)
            raise

        try:
            self.helper.switch_replication_pair(replica_pair_id)
            self.helper.set_pair_secondary_write_lock(replica_pair_id)
            self.helper.sync_replication_pair(replica_pair_id)
        except Exception:
            LOG.exception('Failed to completely switch over '
                          'replication pair %s.', replica_pair_id)

            # for all the rest steps,
            # because secondary share is accessible now,
            # the upper business may access the secondary share,
            # return success to tell replica is primary.
            return

219 def delete_replication_pair(self, replica_pair_id): 

220 try: 

221 self.helper.split_replication_pair(replica_pair_id) 

222 except Exception: 

223 # Ignore this exception because replication pair may at some 

224 # abnormal status that supports deleting. 

225 LOG.warning('Failed to split replication pair %s ' 

226 'before deleting it. Ignore this exception, ' 

227 'and try to delete anyway.', 

228 replica_pair_id) 

229 

230 try: 

231 self.helper.delete_replication_pair(replica_pair_id) 

232 except Exception: 

233 LOG.exception('Failed to delete replication pair %s.', 

234 replica_pair_id) 

235 raise 

236 

237 def create_replica_pair(self, ctx, 

238 local_share_info, 

239 remote_device_wwn, 

240 remote_fs_id): 

241 """Create replication pair for RPC call. 

242 

243 This is for remote call, because replica pair can only be created 

244 by master node. 

245 """ 

246 return self.create(local_share_info, 

247 remote_device_wwn, 

248 remote_fs_id)