GCP Static Proxy Configuration
Configure Google Cloud Platform services through static SSL proxy IPs for security compliance and IP whitelisting requirements
Table of Contents
Overview
Google Cloud Platform services often require static IP addresses for security compliance, API whitelisting, and network security rules. This guide demonstrates how to configure various GCP services to work through your OutboundGateway static SSL proxy IPs.
Key Benefits: Consistent IP addresses for compliance, simplified VPC firewall rules, enhanced security, and better control over outbound traffic.
Prerequisites
Required Accounts & Tools
- Active OutboundGateway subscription with static IP credentials
- Google Cloud Platform account with appropriate permissions
- gcloud CLI installed and configured
- Python 3.8+ with Google Cloud client libraries
- Node.js 14+ with Google Cloud SDK (optional)
Enable Required APIs
# Enable required GCP APIs
gcloud services enable cloudfunctions.googleapis.com
gcloud services enable storage.googleapis.com
gcloud services enable bigquery.googleapis.com
gcloud services enable compute.googleapis.com
Cloud Functions with Static Proxy
Python Cloud Function Example
# main.py
import functions_framework
import requests
import os
import json
# Proxy configuration from environment variables
# NOTE(review): the fallback values below are documentation placeholders only.
# In a real deployment the env vars should be required (no hardcoded defaults):
# baked-in credentials in source are a security risk.
PROXY_HOST = os.environ.get('PROXY_HOST', '192.168.1.100')
PROXY_PORT = os.environ.get('PROXY_PORT', '8080')
PROXY_USER = os.environ.get('PROXY_USER', 'your_proxy_username')
PROXY_PASS = os.environ.get('PROXY_PASS', 'your_proxy_password')
def create_proxy_session():
    """Create a requests session routed through the static SSL proxy.

    Returns:
        requests.Session: session whose HTTPS traffic is tunneled through
        the proxy at PROXY_HOST:PROXY_PORT using basic-auth credentials.
    """
    # Percent-encode the credentials: passwords containing reserved
    # characters ('@', ':', '/', '#', ...) would otherwise corrupt the
    # proxy URL that requests parses.
    from urllib.parse import quote
    session = requests.Session()
    proxy_url = (
        f'https://{quote(PROXY_USER, safe="")}:{quote(PROXY_PASS, safe="")}'
        f'@{PROXY_HOST}:{PROXY_PORT}'
    )
    session.proxies = {
        'https': proxy_url
    }
    return session
@functions_framework.http
def api_proxy_handler(request):
    """HTTP Cloud Function that makes API calls through static proxy"""
    # CORS headers applied to every response.
    cors_headers = {
        'Access-Control-Allow-Origin': '*',
        'Access-Control-Allow-Methods': 'GET, POST, OPTIONS',
        'Access-Control-Allow-Headers': 'Content-Type',
    }
    # Preflight request: reply immediately with no body.
    if request.method == 'OPTIONS':
        return ('', 204, cors_headers)
    # JSON responses additionally carry a Content-Type header.
    json_headers = dict(cors_headers)
    json_headers['Content-Type'] = 'application/json'
    try:
        proxied = create_proxy_session()
        # Example: Call external API through proxy
        api_url = 'https://api.example.com/data'
        upstream = proxied.get(api_url, timeout=30)
        content_type = upstream.headers.get('content-type', '')
        payload = {
            'success': True,
            'data': upstream.json() if content_type.startswith('application/json') else upstream.text,
            'proxy_used': f'{PROXY_HOST}:{PROXY_PORT}',
            'status_code': upstream.status_code,
        }
        return (json.dumps(payload), 200, json_headers)
    except Exception as exc:
        failure = {
            'success': False,
            'error': str(exc),
            'proxy_used': f'{PROXY_HOST}:{PROXY_PORT}',
        }
        return (json.dumps(failure), 500, json_headers)
@functions_framework.cloud_event
def background_task_handler(cloud_event):
    """Background Cloud Function that processes events through proxy"""
    try:
        proxied = create_proxy_session()
        # Forward the raw event payload to the downstream webhook.
        payload = cloud_event.data
        webhook_url = 'https://webhook.example.com/processing'
        result = proxied.post(
            webhook_url,
            json=payload,
            headers={'Content-Type': 'application/json'},
            timeout=30,
        )
        print(f"Webhook sent successfully through proxy. Status: {result.status_code}")
    except Exception as exc:
        # Log and re-raise so the platform can retry the event.
        print(f"Error processing event through proxy: {exc}")
        raise
requirements.txt
functions-framework==3.*
requests==2.31.0
Deploy Cloud Function
# Deploy with environment variables (for production, store PROXY_PASS in Secret Manager instead of plaintext --set-env-vars)
gcloud functions deploy api-proxy-function \
--runtime python39 \
--trigger-http \
--allow-unauthenticated \
--set-env-vars PROXY_HOST=192.168.1.100,PROXY_PORT=8080,PROXY_USER=your_proxy_username,PROXY_PASS=your_proxy_password \
--entry-point api_proxy_handler \
--region us-central1
# Deploy background function
gcloud functions deploy background-processor \
--runtime python39 \
--trigger-topic your-topic \
--set-env-vars PROXY_HOST=192.168.1.100,PROXY_PORT=8080,PROXY_USER=your_proxy_username,PROXY_PASS=your_proxy_password \
--entry-point background_task_handler \
--region us-central1
Cloud Storage with Static Proxy
Python Example with Google Cloud Storage
from google.cloud import storage
import os
from google.auth.transport.requests import Request
from google.oauth2 import service_account
# Proxy configuration
# NOTE(review): placeholder values for documentation — substitute your
# OutboundGateway static IP and credentials (prefer environment variables
# over hardcoded literals in real code).
PROXY_HOST = "192.168.1.100" # Your static IP
PROXY_PORT = 8080
PROXY_USER = "your_proxy_username"
PROXY_PASS = "your_proxy_password"
class ProxyStorageClient:
    # Cloud Storage client whose HTTPS traffic is routed through the
    # static SSL proxy (via the HTTPS_PROXY environment variable).

    def __init__(self, credentials_path=None):
        """Initialize storage client with proxy configuration"""
        # Route all HTTPS traffic from the Google client libraries
        # through the static proxy.
        os.environ['HTTPS_PROXY'] = f'https://{PROXY_USER}:{PROXY_PASS}@{PROXY_HOST}:{PROXY_PORT}'
        if credentials_path and os.path.exists(credentials_path):
            credentials = service_account.Credentials.from_service_account_file(
                credentials_path,
                scopes=['https://www.googleapis.com/auth/cloud-platform']
            )
            # Refresh credentials through proxy
            credentials.refresh(Request())
        else:
            # Fall back to application default credentials.
            credentials = None
        self.client = storage.Client(credentials=credentials)

    def upload_file(self, bucket_name, source_file, destination_blob_name):
        """Upload file to Cloud Storage through proxy"""
        try:
            target = self.client.bucket(bucket_name).blob(destination_blob_name)
            target.upload_from_filename(source_file)
            print(f"File {source_file} uploaded to gs://{bucket_name}/{destination_blob_name}")
            return True
        except Exception as e:
            print(f"Upload failed: {e}")
            return False

    def download_file(self, bucket_name, blob_name, destination_file):
        """Download file from Cloud Storage through proxy"""
        try:
            remote = self.client.bucket(bucket_name).blob(blob_name)
            remote.download_to_filename(destination_file)
            print(f"File gs://{bucket_name}/{blob_name} downloaded to {destination_file}")
            return True
        except Exception as e:
            print(f"Download failed: {e}")
            return False

    def list_files(self, bucket_name, prefix=None):
        """List files in Cloud Storage bucket through proxy"""
        try:
            blobs = self.client.bucket(bucket_name).list_blobs(prefix=prefix)
            return [
                {
                    'name': b.name,
                    'size': b.size,
                    'updated': b.updated,
                    'content_type': b.content_type,
                }
                for b in blobs
            ]
        except Exception as e:
            print(f"List failed: {e}")
            return []
# Usage example
def main():
    """Upload a sample file through the proxy, then list the bucket."""
    proxy_storage = ProxyStorageClient('path/to/service-account.json')
    # Upload a file
    uploaded = proxy_storage.upload_file(
        bucket_name='your-bucket-name',
        source_file='local-file.txt',
        destination_blob_name='remote-file.txt'
    )
    if not uploaded:
        return
    # List files only after a successful upload.
    files = proxy_storage.list_files('your-bucket-name')
    print(f"Found {len(files)} files in bucket")
    for entry in files:
        print(f"  - {entry['name']} ({entry['size']} bytes)")

if __name__ == '__main__':
    main()
Node.js Example with @google-cloud/storage
const { Storage } = require('@google-cloud/storage');
const { HttpsProxyAgent } = require('https-proxy-agent');
// SSL Proxy configuration
const proxyUrl = 'https://your_proxy_username:your_proxy_password@192.168.1.100:8080';
const proxyAgent = new HttpsProxyAgent(proxyUrl);
// Cloud Storage client that forces all requests through the static SSL proxy.
// NOTE(review): `process.env.NODE_TLS_REJECT_UNAUTHORIZED = '0'` disables TLS
// certificate verification for the ENTIRE Node.js process — this is insecure
// and should be replaced with a proper CA bundle in production.
// NOTE(review): the patch in patchStorageForProxy assumes the Storage client
// exposes a `makeReq` method; verify against the installed
// @google-cloud/storage version, as internal request plumbing differs
// between releases and the patch may be a no-op otherwise.
class ProxyStorageClient {
  constructor(serviceAccountPath) {
    // Configure HTTPS agent for proxy
    process.env.NODE_TLS_REJECT_UNAUTHORIZED = '0';
    this.storage = new Storage({
      keyFilename: serviceAccountPath,
      projectId: 'your-gcp-project-id',
      // Use proxy agent for all requests
      retryOptions: {
        autoRetry: true,
        maxRetries: 3,
        retryDelayMultiplier: 2
      }
    });
    // Patch the storage client to use proxy
    this.patchStorageForProxy();
  }
  // Wrap the client's request method so every outgoing call carries the
  // module-level proxy agent.
  patchStorageForProxy() {
    const originalRequest = this.storage.makeReq;
    this.storage.makeReq = async (opts, callback) => {
      // Add proxy agent to request options
      opts.agent = proxyAgent;
      return originalRequest.call(this.storage, opts, callback);
    };
  }
  // Upload a local file to gs://bucketName/destinationBlobName.
  // Returns true on success, false on failure (errors are logged, not thrown).
  async uploadFile(bucketName, sourceFile, destinationBlobName) {
    try {
      const bucket = this.storage.bucket(bucketName);
      await bucket.upload(sourceFile, {
        destination: destinationBlobName,
        metadata: {
          contentType: 'text/plain'
        }
      });
      console.log(`File ${sourceFile} uploaded to gs://${bucketName}/${destinationBlobName}`);
      return true;
    } catch (error) {
      console.error('Upload failed:', error);
      return false;
    }
  }
  // Download gs://bucketName/blobName to a local path.
  // Returns true on success, false on failure.
  async downloadFile(bucketName, blobName, destinationFile) {
    try {
      const bucket = this.storage.bucket(bucketName);
      const file = bucket.file(blobName);
      await file.download({
        destination: destinationFile
      });
      console.log(`File gs://${bucketName}/${blobName} downloaded to ${destinationFile}`);
      return true;
    } catch (error) {
      console.error('Download failed:', error);
      return false;
    }
  }
  // List objects in the bucket (optionally filtered by name prefix).
  // Returns an array of {name, size, updated, contentType}; [] on failure.
  async listFiles(bucketName, prefix = null) {
    try {
      const bucket = this.storage.bucket(bucketName);
      const [files] = await bucket.getFiles({ prefix });
      const fileList = files.map(file => ({
        name: file.name,
        size: file.metadata.size,
        updated: file.metadata.updated,
        contentType: file.metadata.contentType
      }));
      return fileList;
    } catch (error) {
      console.error('List failed:', error);
      return [];
    }
  }
}
// Usage example
async function main() {
  const proxyStorage = new ProxyStorageClient('path/to/service-account.json');
  // Upload a file; list the bucket only when the upload succeeded.
  const uploadSuccess = await proxyStorage.uploadFile(
    'your-bucket-name',
    'local-file.txt',
    'remote-file.txt'
  );
  if (!uploadSuccess) {
    return;
  }
  const files = await proxyStorage.listFiles('your-bucket-name');
  console.log(`Found ${files.length} files in bucket`);
  for (const file of files) {
    console.log(`  - ${file.name} (${file.size} bytes)`);
  }
}
main().catch(console.error);
BigQuery with Static Proxy
Python Example with google-cloud-bigquery
from google.cloud import bigquery
import os
import pandas as pd
# Proxy configuration
# NOTE(review): placeholder values — substitute your OutboundGateway static
# IP and credentials (prefer environment variables over hardcoded literals).
PROXY_HOST = "192.168.1.100"
PROXY_PORT = 8080
PROXY_USER = "your_proxy_username"
PROXY_PASS = "your_proxy_password"
class ProxyBigQueryClient:
    # BigQuery client that routes its API traffic through the static SSL
    # proxy (via the HTTPS_PROXY environment variable set in __init__).

    def __init__(self, credentials_path=None):
        """Initialize BigQuery client with proxy configuration"""
        # Set SSL proxy environment variables
        # NOTE(review): this mutates process-wide state — every HTTPS client
        # in the process is proxied, not just BigQuery. Confirm intended.
        os.environ['HTTPS_PROXY'] = f'https://{PROXY_USER}:{PROXY_PASS}@{PROXY_HOST}:{PROXY_PORT}'
        # Initialize client
        if credentials_path and os.path.exists(credentials_path):
            self.client = bigquery.Client.from_service_account_json(
                credentials_path,
                project='your-gcp-project-id'
            )
        else:
            self.client = bigquery.Client(project='your-gcp-project-id')

    def execute_query(self, query, parameters=None):
        """Execute BigQuery query through proxy.

        Returns a pandas DataFrame of results, or None on failure.
        """
        try:
            job_config = bigquery.QueryJobConfig()
            if parameters:
                job_config.query_parameters = parameters
            query_job = self.client.query(query, job_config=job_config)
            # Blocks until the query job completes.
            results = query_job.result()
            # Convert to DataFrame
            df = results.to_dataframe()
            print(f"Query executed successfully. Returned {len(df)} rows")
            return df
        except Exception as e:
            print(f"Query execution failed: {e}")
            return None

    def get_table_info(self, dataset_id, table_id):
        """Get table information through proxy.

        Returns a dict of metadata (row/byte counts, timestamps, schema),
        or None on failure.
        """
        try:
            dataset_ref = self.client.dataset(dataset_id)
            table_ref = dataset_ref.table(table_id)
            table = self.client.get_table(table_ref)
            table_info = {
                'table_id': table.table_id,
                'num_rows': table.num_rows,
                'num_bytes': table.num_bytes,
                'created': table.created,
                'modified': table.modified,
                'schema': [
                    {
                        'name': field.name,
                        'type': field.field_type,
                        'mode': field.mode
                    } for field in table.schema
                ]
            }
            return table_info
        except Exception as e:
            print(f"Failed to get table info: {e}")
            return None

    def export_to_storage(self, query, destination_uri, format='CSV'):
        """Export query results to Cloud Storage through proxy.

        NOTE(review): ``QueryJobConfig.destination`` expects a destination
        *table* reference; passing a ``gs://`` URI here likely fails —
        exporting query results to Cloud Storage normally requires running
        the query into a table and then a separate extract job
        (``client.extract_table``). Also note the ``format`` parameter is
        currently unused. Verify against the google-cloud-bigquery docs.
        """
        try:
            job_config = bigquery.QueryJobConfig()
            job_config.destination = destination_uri
            job_config.create_disposition = bigquery.CreateDisposition.CREATE_IF_NEEDED
            job_config.write_disposition = bigquery.WriteDisposition.WRITE_TRUNCATE
            query_job = self.client.query(query, job_config=job_config)
            query_job.result()  # Wait for completion
            print(f"Results exported to {destination_uri}")
            return True
        except Exception as e:
            print(f"Export failed: {e}")
            return False
# Usage example
def main():
    """Run the sample query and, on success, export the results to GCS."""
    proxy_bq = ProxyBigQueryClient('path/to/service-account.json')
    # Top-10 most common California names from the public USA names dataset.
    query = """
    SELECT
        name,
        COUNT(*) as count
    FROM `bigquery-public-data.usa_names.usa_1910_2013`
    WHERE state = 'CA'
    GROUP BY name
    ORDER BY count DESC
    LIMIT 10
    """
    frame = proxy_bq.execute_query(query)
    if frame is None:
        return
    print("Query Results:")
    print(frame)
    # Export to Cloud Storage
    proxy_bq.export_to_storage(
        query,
        'gs://your-bucket/query_results.csv'
    )

if __name__ == '__main__':
    main()
VPC Firewall Rules Configuration
Create Firewall Rules for Static IPs
# Create firewall rule to allow traffic from your static IP
gcloud compute firewall-rules create proxyflow-static-ips \
--network default \
--action ALLOW \
--direction INGRESS \
--priority 1000 \
--source-ranges 192.168.1.100/32,192.168.1.101/32 \
--rules tcp:8080,tcp:443,tcp:80 \
--description "Allow traffic from OutboundGateway static IPs"
# Allow all traffic from static IPs (less restrictive)
gcloud compute firewall-rules create proxyflow-static-ips-all \
--network default \
--action ALLOW \
--direction INGRESS \
--priority 1001 \
--source-ranges 192.168.1.100/32,192.168.1.101/32 \
--rules all \
--description "Allow all traffic from OutboundGateway static IPs"
# List existing firewall rules
gcloud compute firewall-rules list --filter="name~proxyflow"
# Delete firewall rule if needed
gcloud compute firewall-rules delete proxyflow-static-ips
Service Account Permissions
# Create service account for proxy operations
gcloud iam service-accounts create proxyflow-sa \
--display-name "OutboundGateway Service Account" \
--description "Service account for OutboundGateway static proxy operations"
# Grant necessary permissions
gcloud projects add-iam-policy-binding your-gcp-project-id \
--member="serviceAccount:proxyflow-sa@your-gcp-project-id.iam.gserviceaccount.com" \
--role="roles/storage.admin"
gcloud projects add-iam-policy-binding your-gcp-project-id \
--member="serviceAccount:proxyflow-sa@your-gcp-project-id.iam.gserviceaccount.com" \
--role="roles/bigquery.admin"
gcloud projects add-iam-policy-binding your-gcp-project-id \
--member="serviceAccount:proxyflow-sa@your-gcp-project-id.iam.gserviceaccount.com" \
--role="roles/cloudfunctions.admin"
# Download service account key
gcloud iam service-accounts keys create ~/proxyflow-sa-key.json \
--iam-account=proxyflow-sa@your-gcp-project-id.iam.gserviceaccount.com
Troubleshooting
Authentication Issues
If you encounter authentication errors:
- Verify service account key file path and permissions
- Check if the service account has required IAM roles
- Ensure GCP project ID is correctly configured
- Validate proxy credentials are active and correct
Network Connectivity
For network-related issues:
- Verify firewall rules allow traffic from your static IPs
- Check VPC settings and network tags
- Ensure Cloud Functions have internet access enabled
- Test proxy connectivity from your network
Performance Optimization
To improve performance:
- Use connection pooling for storage operations
- Implement retry logic with exponential backoff
- Monitor proxy bandwidth usage and latency
- Cache frequently accessed data when possible
Ready to Configure GCP with Static IPs?
Get your static SSL proxy IPs today and ensure your Google Cloud services maintain consistent IP addresses for compliance and security requirements.
Get Started with OutboundGateway