-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathgenerate-inventory.sh
More file actions
executable file
·199 lines (163 loc) · 7.52 KB
/
generate-inventory.sh
File metadata and controls
executable file
·199 lines (163 loc) · 7.52 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
#!/bin/bash
# Generate Kafka Connect Ansible inventory and configuration files from the
# Terraform state stored in S3.
#
# Requires: aws CLI (with credentials able to read the state bucket), jq.
# Optional environment overrides: S3_BUCKET, S3_KEY, KAFKA_* security
# settings, ADMIN_PASSWORD, JWT_TOKEN.

# Fail fast: -e aborts on unhandled errors, -u catches unset variables,
# pipefail surfaces failures inside pipelines (e.g. the jq extraction steps).
set -euo pipefail

# Configuration (all overridable via the environment)
S3_BUCKET="${S3_BUCKET:-trendyol-kafka-terraform-state}"
S3_KEY="${S3_KEY:-prod/kafka-infrastructure/terraform.tfstate}"
# Absolute directory containing this script; all generated files live here.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
INVENTORY_FILE="${SCRIPT_DIR}/inventory.yml"
ENV_FILE="${SCRIPT_DIR}/.env"
CONNECTOR_JSON="${SCRIPT_DIR}/connectors/http-source-connector.json"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "🔄 Generating Kafka Connect Configuration from S3"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo ""

# Fetch the Terraform state file from S3 into a variable.
echo "📥 Fetching Terraform state from S3..."
# BUG FIX: the previous `STATE_JSON=$(aws ... || { ...; exit 1; })` pattern
# only exited the command-substitution SUBSHELL, so on a failed download the
# script carried on with an empty STATE_JSON. Assign first, then test.
if ! STATE_JSON=$(aws s3 cp "s3://${S3_BUCKET}/${S3_KEY}" - 2>/dev/null); then
  echo "❌ Error: Failed to fetch state from S3" >&2
  echo "   Make sure AWS credentials are configured and bucket exists" >&2
  exit 1
fi
echo "✅ State file downloaded successfully"
echo ""
# Extract the Kafka Connect public DNS names from the Terraform outputs.
CONNECT_PUBLIC_DNS=$(echo "$STATE_JSON" | jq -r '.outputs.kafka_connect_public_dns.value[]' 2>/dev/null || echo "")
if [ -z "$CONNECT_PUBLIC_DNS" ] || [ "$CONNECT_PUBLIC_DNS" = "null" ]; then
  echo "❌ Error: Could not find kafka_connect_public_dns in Terraform state" >&2
  exit 1
fi

# Split the newline-separated list safely; unlike the unquoted ($VAR)
# expansion, mapfile performs no globbing on hostnames.
mapfile -t CONNECT_DNS_ARRAY <<< "$CONNECT_PUBLIC_DNS"

# The rest of the script (inventory, compose files) assumes 2 nodes.
if [ "${#CONNECT_DNS_ARRAY[@]}" -ne 2 ]; then
  echo "⚠️ Warning: Expected 2 Kafka Connect nodes, found ${#CONNECT_DNS_ARRAY[@]}" >&2
fi

# Derive the public IP embedded in an EC2 public DNS name
# (ec2-54-217-66-144.eu-west-1.compute.amazonaws.com -> 54.217.66.144).
dns_to_ip() {
  local host=${1%%.*}   # keep only the first DNS label
  host=${host#ec2-}     # drop the "ec2-" prefix
  printf '%s\n' "${host//-/.}"
}

CONNECT_NODE1_DNS="${CONNECT_DNS_ARRAY[0]}"
CONNECT_NODE1_IP=$(dns_to_ip "$CONNECT_NODE1_DNS")
# Guard the [1] index so a single-node state degrades to the warning above
# instead of an unbound-variable abort under `set -u`.
CONNECT_NODE2_DNS="${CONNECT_DNS_ARRAY[1]:-}"
CONNECT_NODE2_IP=$(dns_to_ip "$CONNECT_NODE2_DNS")
# Extract the Kafka broker public DNS names from the Terraform outputs.
BROKER_PUBLIC_DNS=$(echo "$STATE_JSON" | jq -r '.outputs.kafka_broker_public_dns.value[]' 2>/dev/null || echo "")
if [ -z "$BROKER_PUBLIC_DNS" ] || [ "$BROKER_PUBLIC_DNS" = "null" ]; then
  echo "❌ Error: Could not find kafka_broker_public_dns in Terraform state" >&2
  exit 1
fi

# Split safely (no globbing) and join as comma-separated host:9092 pairs.
mapfile -t BROKER_DNS_ARRAY <<< "$BROKER_PUBLIC_DNS"
KAFKA_BOOTSTRAP_SERVERS=$(printf '%s:9092,' "${BROKER_DNS_ARRAY[@]}")
KAFKA_BOOTSTRAP_SERVERS=${KAFKA_BOOTSTRAP_SERVERS%,}   # strip trailing comma

echo "📊 Infrastructure Details:"
echo "   Kafka Connect Node 1: ${CONNECT_DNS_ARRAY[0]}"
echo "   Kafka Connect Node 2: ${CONNECT_DNS_ARRAY[1]}"
echo "   Kafka Brokers: ${KAFKA_BOOTSTRAP_SERVERS}"
echo ""
# Generate inventory.yml with one host entry per discovered Connect node.
# Looping (rather than hard-coding two stanzas) keeps the inventory correct
# even when the node count differs from the expected 2.
echo "📝 Generating ${INVENTORY_FILE}..."
cat > "${INVENTORY_FILE}" <<EOF
---
all:
  children:
    kafka_connect:
      hosts:
EOF
node_index=1
for node_dns in "${CONNECT_DNS_ARRAY[@]}"; do
  cat >> "${INVENTORY_FILE}" <<EOF
        kafka-connect-node-${node_index}:
          ansible_host: ${node_dns}
          ansible_user: ubuntu
          ansible_ssh_private_key_file: ~/.ssh/trendyol-kafka-key.pem
          ansible_ssh_common_args: '-o StrictHostKeyChecking=no'
EOF
  node_index=$((node_index + 1))
done
echo "✅ Inventory generated successfully"
echo ""
# Generate .env with the dynamically discovered bootstrap servers.
# NOTE(review): the fall-back secrets below ("asd", "confluent...pass",
# "token") are weak placeholders — always override them via the environment
# for a real deployment.
echo "📝 Generating ${ENV_FILE}..."
cat > "${ENV_FILE}" <<EOF
# Kafka Broker Configuration (Auto-generated from Terraform State)
KAFKA_BOOTSTRAP_SERVERS=${KAFKA_BOOTSTRAP_SERVERS}
# Security Configuration
KAFKA_SECURITY_PROTOCOL=${KAFKA_SECURITY_PROTOCOL:-SASL_SSL}
KAFKA_SASL_MECHANISM=${KAFKA_SASL_MECHANISM:-SCRAM-SHA-512}
KAFKA_SASL_USERNAME=${KAFKA_SASL_USERNAME:-admin}
KAFKA_SASL_PASSWORD=${ADMIN_PASSWORD:-${KAFKA_SASL_PASSWORD:-asd}}
SSL_TRUSTSTORE_PASSWORD=${SSL_TRUSTSTORE_PASSWORD:-confluenttruststorepass}
SSL_KEYSTORE_PASSWORD=${SSL_KEYSTORE_PASSWORD:-confluentkeystorestorepass}
SSL_KEY_PASSWORD=${SSL_KEY_PASSWORD:-confluentkeystorestorepass}
# JWT Token for FastAPI Authentication
JWT_TOKEN=${JWT_TOKEN:-token}
EOF
# The .env file contains credentials — keep it readable by the owner only.
chmod 600 "${ENV_FILE}"
echo "✅ .env file generated successfully"
echo ""
# Point CONNECT_REST_ADVERTISED_HOST_NAME in each docker-compose file at the
# public DNS name of the node it runs on.
DOCKER_COMPOSE_1="${SCRIPT_DIR}/docker-compose-1.yml"
DOCKER_COMPOSE_2="${SCRIPT_DIR}/docker-compose-2.yml"

# $1 = compose file path, $2 = DNS name to advertise. Silently no-op when
# the compose file does not exist, mirroring the original [ -f ] guards.
update_advertised_host() {
  local compose_file=$1
  local advertised_dns=$2
  [ -f "$compose_file" ] || return 0
  sed -i "s|CONNECT_REST_ADVERTISED_HOST_NAME:.*|CONNECT_REST_ADVERTISED_HOST_NAME: \"${advertised_dns}\"|g" "$compose_file"
  echo "✅ Updated CONNECT_REST_ADVERTISED_HOST_NAME in $(basename "$compose_file") to ${advertised_dns}"
}

update_advertised_host "$DOCKER_COMPOSE_1" "$CONNECT_NODE1_DNS"
update_advertised_host "$DOCKER_COMPOSE_2" "$CONNECT_NODE2_DNS"
# Generate the connector JSON from its template, if present.
CONNECTOR_TEMPLATE="${SCRIPT_DIR}/connectors/http-source-connector.json.template"
if [ -f "${CONNECTOR_TEMPLATE}" ]; then
  echo "📝 Generating ${CONNECTOR_JSON} from template..."
  # Read JWT token from the environment or fall back to the placeholder.
  JWT_TOKEN_VALUE=${JWT_TOKEN:-token}
  # Use bash pattern substitution instead of sed so substituted values
  # containing sed metacharacters (|, &, \) cannot corrupt the output JSON.
  # ($(<file) drops trailing newlines; printf '%s\n' restores exactly one.)
  template_content=$(<"${CONNECTOR_TEMPLATE}")
  # NOTE(review): the {{CONNECT_PUBLIC_IP}} placeholder is filled with the
  # node-1 DNS name, not an IP — confirm the template's placeholder naming.
  template_content=${template_content//'{{CONNECT_PUBLIC_IP}}'/${CONNECT_NODE1_DNS}}
  template_content=${template_content//'{{JWT_TOKEN}}'/${JWT_TOKEN_VALUE}}
  printf '%s\n' "${template_content}" > "${CONNECTOR_JSON}"
  echo "✅ Connector JSON generated from template"
  echo "   URL: http://${CONNECT_NODE1_DNS}:2020/topics"
  echo ""
fi
# Rewrite KAFKA_BOOTSTRAP_SERVERS / CONNECT_HOST assignments inside any
# helper scripts under scripts/ so they target the freshly discovered hosts.
if [ -d "${SCRIPT_DIR}/scripts" ]; then
  echo "📝 Updating scripts with KAFKA_BOOTSTRAP_SERVERS..."
  for script in "${SCRIPT_DIR}/scripts"/*.sh; do
    # The glob itself remains when nothing matches — skip non-files.
    [ -f "$script" ] || continue
    # Only touch scripts that actually reference one of the variables.
    if grep -q "KAFKA_BOOTSTRAP_SERVERS\|CONNECT_HOST" "$script" 2>/dev/null; then
      sed -i "s|KAFKA_BOOTSTRAP_SERVERS=.*|KAFKA_BOOTSTRAP_SERVERS=\"${KAFKA_BOOTSTRAP_SERVERS}\"|g" "$script"
      # CONNECT_HOST is forced to localhost: the scripts run on the node itself.
      sed -i "s|CONNECT_HOST=.*|CONNECT_HOST=\"localhost\"|g" "$script"
      # Quote the substitution (SC2086): paths with spaces must not word-split.
      echo "   ✅ Updated $(basename "$script")"
    fi
  done
  # GNU `sed -i` creates no .bak files; this cleanup only matters if a
  # BSD-style `sed -i .bak` is ever used. Harmless otherwise.
  rm -f "${SCRIPT_DIR}/scripts"/*.sh.bak
  echo ""
fi
# Show the generated inventory, then a summary of everything that changed
# and how to use it. A single here-doc replaces the long run of echoes.
echo "📋 Generated inventory:"
cat "${INVENTORY_FILE}"
cat <<EOF

━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
✅ Done! Files updated:
   - ${INVENTORY_FILE}
   - ${ENV_FILE}
   - ${CONNECTOR_JSON}
   - scripts/*.sh

📊 Configuration:
   Kafka Connect Node 1: ${CONNECT_DNS_ARRAY[0]}
   Kafka Connect Node 2: ${CONNECT_DNS_ARRAY[1]}
   Bootstrap Servers: ${KAFKA_BOOTSTRAP_SERVERS}

💡 Usage:
   Deploy: ansible-playbook -i inventory.yml ansible_kafka_connect.yml
   Test Node 1: curl http://${CONNECT_DNS_ARRAY[0]}:8083/
   Test Node 2: curl http://${CONNECT_DNS_ARRAY[1]}:8083/
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
EOF