Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Leverage of Cloud Shell on GCP #36

Open
luisvillarreal opened this issue Jul 14, 2020 · 2 comments
Open

Leverage of Cloud Shell on GCP #36

luisvillarreal opened this issue Jul 14, 2020 · 2 comments

Comments

@luisvillarreal
Copy link

luisvillarreal commented Jul 14, 2020

I know that Azure also has an equivalent, and AWS recently released one, but I'm familiar with GCP and built a script that can help the general public to make a quick deploy of this workshop with the use of Cloud Shell. It's a free service and it's pre-loaded with everything you need for this workshop.

It requires going to console.cloud.google.com and running Cloud Shell with a selected project. I extended workshop-create.py and named it workshop-quickcreate-gcp.py.

It creates the myworkshop directory, copies the yaml file, creates a key for the CE default service account (if non-existent), re-creates the yaml file for GCP using easy-to-follow prompts. After I cloned the repo, I placed it in confluent-hybrid-cloud-workshop directory.

Ran it:
python3 workshop-quickcreate-gcp.py --dir ~/myworkshop

Here is the code:


import argparse
import os
import yaml
import json
import shutil
import fileinput
import re
import glob
from subprocess import run, PIPE
from pathlib import Path

print()
print()

# Parse the single required CLI flag: the workshop working directory.
# NOTE: the original rebound the name `argparse` to the parser instance,
# shadowing the imported module; use a distinct name instead.
parser = argparse.ArgumentParser()
parser.add_argument('--dir', help="Workshop directory", required=True)
args = parser.parse_args()

# Location where the GCP service-account key is written and later read.
key_location = f'{Path.home()}/key.json'

def create_sa_key():
    """Create a JSON key for the Compute Engine default service account.

    Looks up the "Compute Engine default" service account with gcloud and
    writes a fresh key to ~/key.json.  Aborts with a clear message when the
    account cannot be found, instead of crashing with an IndexError on an
    empty grep result.
    """
    listing = run(['gcloud iam service-accounts list | grep "Compute Engine default"'],
                  shell = True,
                  stdout = PIPE,
                  stderr = PIPE).stdout.decode('utf-8').split()
    # The email address is the second-to-last whitespace-separated token of
    # the matching row; guard against the account being absent entirely.
    if len(listing) < 2:
        raise SystemExit('Could not find the "Compute Engine default" service account. '
                         'Check that the Compute Engine API is enabled for this project.')
    s_a = listing[-2].strip()

    proc = run([f'gcloud iam service-accounts keys create ~/key.json --iam-account {s_a}'],
               shell = True,
               stdout = PIPE,
               stderr = PIPE)
    print(proc.stdout.decode('utf-8'))

# Reuse an existing key file when it is valid JSON containing a
# client_email; otherwise (missing, malformed, or incomplete) create a
# fresh one.  The original used a bare except and also crashed outright on
# invalid JSON because json.load sat outside the try block.
if os.path.exists(key_location):
    with open(key_location, 'r') as fh:
        try:
            ky = json.load(fh)
            print('Using service account: ' + ky['client_email'])
        except (json.JSONDecodeError, KeyError):
            create_sa_key()
else:
    create_sa_key()

print()

# Stage the GCP example config into the chosen workshop directory.
# NOTE: the original hard-coded ~/myworkshop in shell commands even though
# --dir is configurable; honour args.dir so the copy and the later read at
# f'{args.dir}/workshop.yaml' always refer to the same place.
os.makedirs(args.dir, exist_ok=True)
shutil.copy('workshop-example-gcp.yaml', os.path.join(args.dir, 'workshop.yaml'))

# Load the staged config, then apply the GCP-specific overrides.
with open(f'{args.dir}/workshop.yaml', 'r') as fh:
    wkshp_yaml = yaml.full_load(fh)


# DEVSHELL_PROJECT_ID is set automatically inside Cloud Shell; a KeyError
# here means the script is not running in Cloud Shell with a project set.
wkshp_yaml['workshop']['core']['project'] = os.environ['DEVSHELL_PROJECT_ID']
wkshp_yaml['workshop']['core']['credentials_file_path'] = key_location

# Extensions are not supported by this quick-create flow; pop with a
# default removes the key if present (replaces the original bare except).
wkshp_yaml['workshop'].pop('extensions', None)

def iterate_config_variables(var_name, var_type, var_value = None, gcp_list = None, gcp_filter = None, gcp_zone = None):
    """Prompt interactively for one configuration value and return it.

    Args:
        var_name:  Human-readable name shown in the prompt.
        var_type:  Callable used to coerce/validate free-form input (str, int, ...).
        var_value: Default used when the user just presses Enter.
        gcp_list:  One of 'regions', 'zones', 'machine-types' to present a
                   numbered menu fetched via gcloud, or None for free input.
        gcp_filter: Optional grep filter applied to the gcloud listing.
        gcp_zone:  Optional zone restriction (machine-types listings only).

    Returns:
        The chosen menu entry (gcloud listings) or the coerced free-form value.
    """
    print()
    message = f'Select a value for {var_name} (default = "{var_value}"): '
    # Initialize so an unrecognized gcp_list value cannot trigger a
    # NameError inside the selection loop (the original's bare except hid it).
    opts_dict = {}
    if gcp_list in ['regions', 'zones', 'machine-types']:
        print(f'Fetching {gcp_list}...')
        command = f'gcloud compute {gcp_list} list'

        if gcp_zone:
            command += f' --zones {gcp_zone}'

        if gcp_filter:
            command += f' | grep "{gcp_filter}"'
        # Drop the header row and the trailing empty line of the listing.
        opts = run([command],
                    shell = True,
                    stdout = PIPE,
                    stderr = PIPE).stdout.decode('utf-8').split('\n')[1:-1]
        # First column of each row, keyed by a 1-based menu number.
        opts_dict = {i+1:item.split()[0].strip() for i, item in enumerate(opts)}
        for i, item in opts_dict.items():
            print(f'{i}\t{item}')
        message = f'Select a number from the options listed above (default = {var_value}): '
    while(True):
        var_input = input(message) or var_value
        if gcp_list:
            try:
                var_input = opts_dict[int(var_input)]
                break
            except (KeyError, ValueError, TypeError):
                print('Not a valid option. Try again...')

        elif var_input:
            try:
                var_input = var_type(var_input)
                break
            except (ValueError, TypeError):
                print('Not a valid option. Try again...')
    return var_input

# Interactive configuration: walk the user through every tunable setting.
wkshp_yaml['workshop']['name'] = iterate_config_variables(
    'workshop name', str, var_value = 'dc')
wkshp_yaml['workshop']['participant_count'] = iterate_config_variables(
    'number of participants', int, var_value = 1)
wkshp_yaml['workshop']['participant_password'] = iterate_config_variables(
    'workshop password', str, var_value = 'workshop123!')

# Region choice feeds the zone menu; zone choice feeds the machine-type menu.
selected_region = iterate_config_variables(
    'region', str, var_value = 18, gcp_list = 'regions')
wkshp_yaml['workshop']['core']['region'] = selected_region

selected_zone = iterate_config_variables(
    'zone', str, var_value = 1, gcp_list = 'zones', gcp_filter = selected_region)
wkshp_yaml['workshop']['core']['region_zone'] = selected_zone

wkshp_yaml['workshop']['core']['vm_type'] = iterate_config_variables(
    'machine type', str, var_value = 4, gcp_list = 'machine-types',
    gcp_filter = 'n1-standard', gcp_zone = selected_zone)

# Confluent Cloud credentials have no sensible defaults; the prompt loops
# until a value is supplied.
wkshp_yaml['workshop']['core']['ccloud_bootstrap_servers'] = iterate_config_variables(
    'ccloud bootstrap servers', str)
wkshp_yaml['workshop']['core']['ccloud_api_key'] = iterate_config_variables(
    'ccloud api key', str)
wkshp_yaml['workshop']['core']['ccloud_api_secret'] = iterate_config_variables(
    'ccloud api secret', str)

# Echo the final configuration, then persist it back to the workshop dir.
print()
print('Writing this configuration to YAML file...')
print(json.dumps(wkshp_yaml, indent = 4))

with open(f'{args.dir}/workshop.yaml', 'w') as fh:
    # yaml.dump writes to fh and returns None (the config is re-read below).
    wkshp_yaml = yaml.dump(wkshp_yaml, fh)

# Staging areas: docker content is uploaded to each VM, terraform content
# drives the infrastructure build.
docker_staging = os.path.join(args.dir, ".docker_staging")
terraform_staging = os.path.join(args.dir, ".terraform_staging")

# Open and parse configuration file
with open(os.path.join(args.dir, "workshop.yaml"), 'r') as yaml_file:
    try:
        config = yaml.safe_load(yaml_file)
    except yaml.YAMLError as exc:
        # The original printed the error and fell through, which later
        # failed with a confusing NameError on `config`; abort with the
        # actual parse error instead.
        raise SystemExit(exc)

def copytree(src, dst):
    """Recursively copy the contents of *src* into *dst*.

    Unlike shutil.copytree, this merges into directories that already
    exist instead of failing; directory metadata is copied only for
    directories created by this call.
    """
    if not os.path.exists(dst):
        os.makedirs(dst)
        shutil.copystat(src, dst)
    for entry in os.listdir(src):
        src_path = os.path.join(src, entry)
        dst_path = os.path.join(dst, entry)
        if os.path.isdir(src_path):
            copytree(src_path, dst_path)
        else:
            shutil.copy2(src_path, dst_path)

# Warn when the Confluent Cloud partition budget may be exceeded
# (each participant consumes roughly 50 partitions).
if int(config['workshop']['participant_count']) > 35:
    banner = "*" * 70
    print()
    print(banner)
    print("WARNING: Make sure your Confluent Cloud cluster has enough free partitions")
    print("to support this many participants. Each participant uses ~50 partitions.")
    print(banner)
    print()
    # Require an explicit y/n; any other answer re-prompts.
    while True:
        answer = input('Do You Want To Continue (y/n)? ')
        if answer == 'y':
            break
        if answer == 'n':
            exit()

#----------------------------------------
# Create the Terraform staging directory
#----------------------------------------

# Stage the provider-specific core terraform plus the shared modules.
provider = config['workshop']['core']['cloud_provider']
copytree(os.path.join("./core/terraform", provider), terraform_staging)
copytree("./core/terraform/common", os.path.join(terraform_staging, "common"))

# Overlay terraform files contributed by any configured extensions.
if config['workshop'].get('extensions') is not None:
    for extension in config['workshop']['extensions']:
        ext_terraform = os.path.join("./extensions", extension, "terraform")
        if os.path.exists(ext_terraform):
            copytree(ext_terraform, terraform_staging)

# Create Terraform tfvars file
with open(os.path.join(terraform_staging, "terraform.tfvars"), 'w') as tfvars_file:

    def _write_var(name, value):
        # Every value is emitted as a quoted terraform string variable.
        tfvars_file.write(str(name) + '="' + str(value) + "\"\n")

    # Top-level workshop settings (everything except the nested sections).
    for key in config['workshop']:
        if key not in ('core', 'extensions'):
            _write_var(key, config['workshop'][key])

    # Core settings; cloud_provider only selects the terraform tree above.
    for key in config['workshop']['core']:
        if key != 'cloud_provider':
            _write_var(key, config['workshop']['core'][key])

    # Per-extension settings, only for extensions that ship terraform.
    if config['workshop'].get('extensions') is not None:
        for extension in config['workshop']['extensions']:
            if os.path.exists(os.path.join("./extensions", extension, "terraform")):
                ext_vars = config['workshop']['extensions'][extension]
                if ext_vars is not None:
                    for key in ext_vars:
                        _write_var(key, ext_vars[key])

#----------------------------------------------------------------------------
# Create the Docker staging directory, this directory is uploaded to each VM
#----------------------------------------------------------------------------
# Rebuild the staging area from scratch on every run.
if os.path.exists(docker_staging):
    shutil.rmtree(docker_staging)
os.mkdir(docker_staging)
os.mkdir(os.path.join(docker_staging, "extensions"))
# Core docker files plus the asciidoc workshop guide.
copytree("./core/docker/", docker_staging)
copytree(os.path.join("./core/asciidoc"), os.path.join(docker_staging, "asciidoc"))
# Deal with extensions: splice their docs into the workshop guide and stage
# their asciidoc/image/docker assets alongside the core content.
if 'extensions' in config['workshop'] and config['workshop']['extensions'] != None:
    # Add each extensions asciidoc file as an include in the main workshop.adoc file
    includes = []
    include_str=""
    for extension in config['workshop']['extensions']:
        if os.path.isdir(os.path.join("./extensions", extension, "asciidoc")):
            # Only the first *.adoc found per extension is included.
            includes.append(glob.glob(os.path.join("./extensions", extension, "asciidoc/*.adoc"))[0])
    # Build extension include string
    for include in includes:
        # The leading '.' prefixes the relative ./extensions/... path so the
        # include resolves from the staged doc root.
        include_str += 'include::.' + include + '[]\n'

    # Add extension includes to core workshop.adoc
    # fileinput with inplace=True redirects stdout into the file, so each
    # print() below rewrites one line of workshop.adoc in place.
    for line in fileinput.input(os.path.join(docker_staging, "asciidoc/workshop.adoc"), inplace=True):
        line=re.sub("^#EXTENSIONS_PLACEHOLDER#",include_str,line)
        print(line.rstrip())
    # Copy extension asciidoc files to docker staging
    for extension in config['workshop']['extensions']:
        if os.path.isdir(os.path.join("./extensions", extension, "asciidoc")):
            copytree(os.path.join("./extensions", extension, "asciidoc"), os.path.join(docker_staging, "extensions", extension, "asciidoc"))
    # Copy extension images to docker staging
    # NOTE(review): all extensions share one images dir, so files with the
    # same name overwrite each other -- presumably acceptable; confirm.
    for extension in config['workshop']['extensions']:
        if os.path.isdir(os.path.join("./extensions", extension, "asciidoc/images")):
            copytree(os.path.join("./extensions", extension, "asciidoc/images"), os.path.join(docker_staging, "asciidoc/images"))
    # Copy extension docker files to docker staging and create docker .env file
    for extension in config['workshop']['extensions']:
        if os.path.isdir(os.path.join("./extensions", extension, "docker")):
            copytree(os.path.join("./extensions", extension, "docker"), os.path.join(docker_staging, "extensions", extension, "docker"))
            # Create .env file for docker (appended one KEY=value per line)
            if config['workshop']['extensions'][extension] != None:
                for var in config['workshop']['extensions'][extension]:
                    with open(os.path.join(docker_staging, "extensions", extension, "docker/.env"), 'a') as env_file:
                        env_file.write(var + '=' + config['workshop']['extensions'][extension][var] + "\n")
else:
    # No extensions configured: simply strip the placeholder line from the
    # core workshop guide.
    for line in fileinput.input(os.path.join(docker_staging, "asciidoc/workshop.adoc"), inplace=True):
        line=re.sub("^#EXTENSIONS_PLACEHOLDER#","",line)
        print(line.rstrip())


#-----------------
# Create Workshop
#-----------------

os.chdir(terraform_staging)

# Terraform init
os.system("terraform init")

# Terraform plan
os.system("terraform plan")

# Terraform apply
os.system("terraform apply -auto-approve")

# Show workshop details
os.system("terraform output -json external_ip_addresses > workshop_details.out")
if os.path.exists("workshop_details.out"):
    with open('workshop_details.out') as wd:
        ip_addresses = json.load(wd)
        print("*" * 65)
        print("\n WORKSHOP DETAILS\n Copy & paste into Google Sheets and share with the participants\n")
        print("*" * 65)
        print('=SPLIT("SSH USERNAME,GETTING STARTED URL,PARTICIPANT NAME/EMAIL",",")')
        for id, ip_address in enumerate(ip_addresses, start=1):
            print('=SPLIT("dc{:02d},http://{}", ",")'.format(id, ip_address))
        #print('=SPLIT("{}-{},http://{}", ",")'.format(config['workshop']['name'], id, ip_address))

    os.remove("workshop_details.out")
@tjunderhill
Copy link
Collaborator

Thanks @luisvillarreal, this looks interesting, I'll give it a try.

@luisvillarreal
Copy link
Author

Awesome! Let me know...

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment
Labels
None yet
Projects
None yet
Development

No branches or pull requests

2 participants