// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.

package com.cloud.upgrade.dao;

import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.log4j.Logger;

import com.cloud.utils.crypt.DBEncryptionUtil;
import com.cloud.utils.exception.CloudRuntimeException;
| |
| public class Upgrade442to450 implements DbUpgrade { |
| final static Logger s_logger = Logger.getLogger(Upgrade442to450.class); |
| |
| @Override |
| public String[] getUpgradableVersionRange() { |
| return new String[] {"4.4.2", "4.5.0"}; |
| } |
| |
| @Override |
| public String getUpgradedVersion() { |
| return "4.5.0"; |
| } |
| |
| @Override |
| public boolean supportsRollingUpgrade() { |
| return false; |
| } |
| |
| @Override |
| public InputStream[] getPrepareScripts() { |
| final String scriptFile = "META-INF/db/schema-442to450.sql"; |
| final InputStream script = Thread.currentThread().getContextClassLoader().getResourceAsStream(scriptFile); |
| if (script == null) { |
| throw new CloudRuntimeException("Unable to find " + scriptFile); |
| } |
| |
| return new InputStream[] {script}; |
| } |
| |
| @Override |
| public void performDataMigration(Connection conn) { |
| dropInvalidKeyFromStoragePoolTable(conn); |
| dropDuplicatedForeignKeyFromAsyncJobTable(conn); |
| updateMaxRouterSizeConfig(conn); |
| upgradeMemoryOfVirtualRoutervmOffering(conn); |
| upgradeMemoryOfInternalLoadBalancervmOffering(conn); |
| } |
| |
| private void updateMaxRouterSizeConfig(Connection conn) { |
| String sqlUpdateConfig = "UPDATE `cloud`.`configuration` SET value=? WHERE name='router.ram.size' AND category='Hidden'"; |
| try (PreparedStatement updatePstmt = conn.prepareStatement(sqlUpdateConfig);){ |
| String encryptedValue = DBEncryptionUtil.encrypt("256"); |
| updatePstmt.setBytes(1, encryptedValue.getBytes("UTF-8")); |
| updatePstmt.executeUpdate(); |
| } catch (SQLException e) { |
| throw new CloudRuntimeException("Unable to upgrade max ram size of router in config.", e); |
| } catch (UnsupportedEncodingException e) { |
| throw new CloudRuntimeException("Unable encrypt configuration values ", e); |
| } |
| s_logger.debug("Done updating router.ram.size config to 256"); |
| } |
| |
| private void upgradeMemoryOfVirtualRoutervmOffering(Connection conn) { |
| int newRamSize = 256; //256MB |
| long serviceOfferingId = 0; |
| |
| /** |
| * Pick first row in service_offering table which has system vm type as domainrouter. User added offerings would start from 2nd row onwards. |
| * We should not update/modify any user-defined offering. |
| */ |
| |
| try ( |
| PreparedStatement selectPstmt = conn.prepareStatement("SELECT id FROM `cloud`.`service_offering` WHERE vm_type='domainrouter'"); |
| PreparedStatement updatePstmt = conn.prepareStatement("UPDATE `cloud`.`service_offering` SET ram_size=? WHERE id=?"); |
| ResultSet selectResultSet = selectPstmt.executeQuery(); |
| ) { |
| if(selectResultSet.next()) { |
| serviceOfferingId = selectResultSet.getLong("id"); |
| } |
| |
| updatePstmt.setInt(1, newRamSize); |
| updatePstmt.setLong(2, serviceOfferingId); |
| updatePstmt.executeUpdate(); |
| } catch (SQLException e) { |
| throw new CloudRuntimeException("Unable to upgrade ram_size of service offering for domain router. ", e); |
| } |
| s_logger.debug("Done upgrading RAM for service offering of domain router to " + newRamSize); |
| } |
| |
| private void upgradeMemoryOfInternalLoadBalancervmOffering(Connection conn) { |
| int newRamSize = 256; //256MB |
| long serviceOfferingId = 0; |
| |
| /** |
| * Pick first row in service_offering table which has system vm type as internalloadbalancervm. User added offerings would start from 2nd row onwards. |
| * We should not update/modify any user-defined offering. |
| */ |
| |
| try (PreparedStatement selectPstmt = conn.prepareStatement("SELECT id FROM `cloud`.`service_offering` WHERE vm_type='internalloadbalancervm'"); |
| PreparedStatement updatePstmt = conn.prepareStatement("UPDATE `cloud`.`service_offering` SET ram_size=? WHERE id=?"); |
| ResultSet selectResultSet = selectPstmt.executeQuery()){ |
| if(selectResultSet.next()) { |
| serviceOfferingId = selectResultSet.getLong("id"); |
| } |
| |
| updatePstmt.setInt(1, newRamSize); |
| updatePstmt.setLong(2, serviceOfferingId); |
| updatePstmt.executeUpdate(); |
| } catch (SQLException e) { |
| throw new CloudRuntimeException("Unable to upgrade ram_size of service offering for internal loadbalancer vm. ", e); |
| } |
| s_logger.debug("Done upgrading RAM for service offering of internal loadbalancer vm to " + newRamSize); |
| } |
| |
| @Override |
| public InputStream[] getCleanupScripts() { |
| final String scriptFile = "META-INF/db/schema-442to450-cleanup.sql"; |
| final InputStream script = Thread.currentThread().getContextClassLoader().getResourceAsStream(scriptFile); |
| if (script == null) { |
| throw new CloudRuntimeException("Unable to find " + scriptFile); |
| } |
| |
| return new InputStream[] {script}; |
| } |
| |
| private void dropInvalidKeyFromStoragePoolTable(Connection conn) { |
| HashMap<String, List<String>> uniqueKeys = new HashMap<String, List<String>>(); |
| List<String> keys = new ArrayList<String>(); |
| |
| keys.add("id_2"); |
| uniqueKeys.put("storage_pool", keys); |
| |
| s_logger.debug("Dropping id_2 key from storage_pool table"); |
| for (Map.Entry<String, List<String>> entry: uniqueKeys.entrySet()) { |
| DbUpgradeUtils.dropKeysIfExist(conn,entry.getKey(), entry.getValue(), false); |
| } |
| } |
| |
| private void dropDuplicatedForeignKeyFromAsyncJobTable(Connection conn) { |
| HashMap<String, List<String>> foreignKeys = new HashMap<String, List<String>>(); |
| List<String> keys = new ArrayList<String>(); |
| |
| keys.add("fk_async_job_join_map__join_job_id"); |
| foreignKeys.put("async_job_join_map", keys); |
| |
| s_logger.debug("Dropping fk_async_job_join_map__join_job_id key from async_job_join_map table"); |
| for (Map.Entry<String, List<String>> entry: foreignKeys.entrySet()) { |
| DbUpgradeUtils.dropKeysIfExist(conn,entry.getKey(), entry.getValue(), true); |
| } |
| } |
| } |