use snafu::{OptionExt, ResultExt, Snafu};
use stackable_operator::{
    memory::{BinaryMultiple, MemoryQuantity},
    role_utils::{self, GenericRoleConfig, JavaCommonConfig, JvmArgumentOverrides, Role},
};

use crate::crd::{
    v1alpha1::HiveCluster, MetaStoreConfig, MetaStoreConfigFragment, JVM_SECURITY_PROPERTIES_FILE,
    METRICS_PORT, STACKABLE_CONFIG_DIR, STACKABLE_TRUST_STORE, STACKABLE_TRUST_STORE_PASSWORD,
};

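// Only this fraction of the container memory limit is handed to the JVM heap; the remaining
// ~20 % is left as headroom for non-heap memory (metaspace, thread stacks, native allocations).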
const JAVA_HEAP_FACTOR: f32 = 0.8;

#[derive(Snafu, Debug)]
pub enum Error {
    #[snafu(display("invalid memory resource configuration - missing default or value in crd?"))]
    MissingMemoryResourceConfig,

    #[snafu(display("invalid memory config"))]
    InvalidMemoryConfig {
        source: stackable_operator::memory::Error,
    },

    #[snafu(display("failed to merge jvm argument overrides"))]
    MergeJvmArgumentOverrides { source: role_utils::Error },
}

/// All JVM arguments: the operator-generated defaults merged with any user-supplied
/// `jvmArgumentOverrides`.
fn construct_jvm_args(
    hive: &HiveCluster,
    role: &Role<MetaStoreConfigFragment, GenericRoleConfig, JavaCommonConfig>,
    role_group: &str,
) -> Result<Vec<String>, Error> {
    let mut jvm_args = vec![
        format!("-Djava.security.properties={STACKABLE_CONFIG_DIR}/{JVM_SECURITY_PROPERTIES_FILE}"),
        format!("-javaagent:/stackable/jmx/jmx_prometheus_javaagent.jar={METRICS_PORT}:/stackable/jmx/jmx_hive_config.yaml"),
        format!("-Djavax.net.ssl.trustStore={STACKABLE_TRUST_STORE}"),
        format!("-Djavax.net.ssl.trustStorePassword={STACKABLE_TRUST_STORE_PASSWORD}"),
        "-Djavax.net.ssl.trustStoreType=pkcs12".to_owned(),
    ];

    if hive.has_kerberos_enabled() {
        jvm_args.push("-Djava.security.krb5.conf=/stackable/kerberos/krb5.conf".to_owned());
    }

    let operator_generated = JvmArgumentOverrides::new_with_only_additions(jvm_args);
    let merged = role
        .get_merged_jvm_argument_overrides(role_group, &operator_generated)
        .context(MergeJvmArgumentOverridesSnafu)?;
    Ok(merged
        .effective_jvm_config_after_merging()
        // Sorry for the clone, that's how operator-rs is currently modelled :P
        .clone())
}

/// Arguments that go into `HADOOP_OPTS`, so *not* the heap settings (which you can get using
/// [`construct_hadoop_heapsize_env`]).
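/// Heap arguments such as `-Xmx40000m` are filtered out here, even when added via
/// `jvmArgumentOverrides`; the heap size is expected to come from the memory resource limit
/// via [`construct_hadoop_heapsize_env`] instead.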
pub fn construct_non_heap_jvm_args(
    hive: &HiveCluster,
    role: &Role<MetaStoreConfigFragment, GenericRoleConfig, JavaCommonConfig>,
    role_group: &str,
) -> Result<String, Error> {
    let mut jvm_args = construct_jvm_args(hive, role, role_group)?;
    jvm_args.retain(|arg| !is_heap_jvm_argument(arg));

    Ok(jvm_args.join(" "))
}

/// This will be put into `HADOOP_HEAPSIZE`, which is just the heap size in megabytes (*without*
/// the `m` unit appended).
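/// For example, a memory limit of `512Mi` combined with the heap factor of `0.8` yields `"409"`.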
pub fn construct_hadoop_heapsize_env(merged_config: &MetaStoreConfig) -> Result<String, Error> {
    let heap_size_in_mb = (MemoryQuantity::try_from(
        merged_config
            .resources
            .memory
            .limit
            .as_ref()
            .context(MissingMemoryResourceConfigSnafu)?,
    )
    .context(InvalidMemoryConfigSnafu)?
        * JAVA_HEAP_FACTOR)
        .scale_to(BinaryMultiple::Mebi);

    Ok((heap_size_in_mb.value.floor() as u32).to_string())
}

fn is_heap_jvm_argument(jvm_argument: &str) -> bool {
    let lowercase = jvm_argument.to_lowercase();

    lowercase.starts_with("-xms") || lowercase.starts_with("-xmx")
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::crd::HiveRole;

    #[test]
    fn test_construct_jvm_arguments_defaults() {
        let input = r#"
        apiVersion: hive.stackable.tech/v1alpha1
        kind: HiveCluster
        metadata:
          name: simple-hive
        spec:
          image:
            productVersion: 4.0.0
          clusterConfig:
            database:
              connString: jdbc:derby:;databaseName=/tmp/hive;create=true
              dbType: derby
              credentialsSecret: mySecret
          metastore:
            roleGroups:
              default:
                replicas: 1
        "#;
        let (hive, merged_config, role, rolegroup) = construct_boilerplate(input);
        let non_heap_jvm_args = construct_non_heap_jvm_args(&hive, &role, &rolegroup).unwrap();
        let hadoop_heapsize_env = construct_hadoop_heapsize_env(&merged_config).unwrap();

        assert_eq!(
            non_heap_jvm_args,
            "-Djava.security.properties=/stackable/config/security.properties \
            -javaagent:/stackable/jmx/jmx_prometheus_javaagent.jar=9084:/stackable/jmx/jmx_hive_config.yaml \
            -Djavax.net.ssl.trustStore=/stackable/truststore.p12 \
            -Djavax.net.ssl.trustStorePassword=changeit \
            -Djavax.net.ssl.trustStoreType=pkcs12"
        );
        assert_eq!(hadoop_heapsize_env, "409");
    }

    #[test]
    fn test_construct_jvm_argument_overrides() {
        let input = r#"
        apiVersion: hive.stackable.tech/v1alpha1
        kind: HiveCluster
        metadata:
          name: simple-hive
        spec:
          image:
            productVersion: 4.0.0
          clusterConfig:
            database:
              connString: jdbc:derby:;databaseName=/tmp/hive;create=true
              dbType: derby
              credentialsSecret: mySecret
          metastore:
            config:
              resources:
                memory:
                  limit: 42Gi
            jvmArgumentOverrides:
              add:
                - -Dhttps.proxyHost=proxy.my.corp
                - -Dhttps.proxyPort=8080
                - -Djava.net.preferIPv4Stack=true
            roleGroups:
              default:
                replicas: 1
                jvmArgumentOverrides:
                  # We need more memory!
                  removeRegex:
                    - -Xmx.*
                    - -Dhttps.proxyPort=.*
                  add:
                    - -Xmx40000m
                    - -Dhttps.proxyPort=1234
        "#;
        let (hive, merged_config, role, rolegroup) = construct_boilerplate(input);
        let non_heap_jvm_args = construct_non_heap_jvm_args(&hive, &role, &rolegroup).unwrap();
        let hadoop_heapsize_env = construct_hadoop_heapsize_env(&merged_config).unwrap();

        assert_eq!(
            non_heap_jvm_args,
            "-Djava.security.properties=/stackable/config/security.properties \
            -javaagent:/stackable/jmx/jmx_prometheus_javaagent.jar=9084:/stackable/jmx/jmx_hive_config.yaml \
            -Djavax.net.ssl.trustStore=/stackable/truststore.p12 \
            -Djavax.net.ssl.trustStorePassword=changeit \
            -Djavax.net.ssl.trustStoreType=pkcs12 \
            -Dhttps.proxyHost=proxy.my.corp \
            -Djava.net.preferIPv4Stack=true \
            -Dhttps.proxyPort=1234"
        );
        assert_eq!(hadoop_heapsize_env, "34406");
    }

    fn construct_boilerplate(
        hive_cluster: &str,
    ) -> (
        HiveCluster,
        MetaStoreConfig,
        Role<MetaStoreConfigFragment, GenericRoleConfig, JavaCommonConfig>,
        String,
    ) {
        let hive: HiveCluster = serde_yaml::from_str(hive_cluster).expect("illegal test input");

        let hive_role = HiveRole::MetaStore;
        let rolegroup_ref = hive.metastore_rolegroup_ref("default");
        let merged_config = hive.merged_config(&hive_role, &rolegroup_ref).unwrap();
        let role = hive.spec.metastore.clone().unwrap();

        (hive, merged_config, role, "default".to_owned())
    }
}