test: test jenkins job changes #1674

Open · wants to merge 3 commits into master
73 changes: 73 additions & 0 deletions dataeng/jobs/analytics/ModelTransfersJenkinsTest.groovy
@@ -0,0 +1,73 @@
package analytics
import static org.edx.jenkins.dsl.AnalyticsConstants.secure_scm
import static org.edx.jenkins.dsl.AnalyticsConstants.common_log_rotator
import static org.edx.jenkins.dsl.AnalyticsConstants.common_wrappers
import static org.edx.jenkins.dsl.AnalyticsConstants.common_publishers
import static org.edx.jenkins.dsl.AnalyticsConstants.common_triggers
import static org.edx.jenkins.dsl.AnalyticsConstants.secure_scm_parameters
import static org.edx.jenkins.dsl.AnalyticsConstants.common_authorization

class ModelTransfersJenkinsTest {
    public static def job = { dslFactory, allVars ->
        allVars.get('ENVIRONMENTS').each { environment, env_config ->
            dslFactory.job("transfer-dbt-models-$environment") {
                authorization common_authorization(env_config)
                logRotator common_log_rotator(allVars)
                parameters secure_scm_parameters(allVars)
                parameters {
                    stringParam('WAREHOUSE_TRANSFORMS_URL', allVars.get('WAREHOUSE_TRANSFORMS_URL'), 'URL for the Warehouse Transforms Repo.')
                    stringParam('WAREHOUSE_TRANSFORMS_BRANCH', allVars.get('WAREHOUSE_TRANSFORMS_BRANCH'), 'Branch of Warehouse Transforms to use.')
                    stringParam('DBT_PROJECT', env_config.get('DBT_PROJECT', allVars.get('DBT_PROJECT')), 'dbt project in warehouse-transforms to work on.')
                    stringParam('DBT_PROFILE', env_config.get('DBT_PROFILE', allVars.get('DBT_PROFILE')), 'dbt profile from analytics-secure to work on.')
                    stringParam('DBT_TARGET', env_config.get('DBT_TARGET', allVars.get('DBT_TARGET')), 'dbt target from analytics-secure to work on.')
                    stringParam('MODELS_TO_TRANSFER', env_config.get('MODELS_TO_TRANSFER'), 'Name of dbt models which should be transferred to S3 via a Snowflake stage.')
                    stringParam('NOTIFY', env_config.get('NOTIFY', allVars.get('NOTIFY', '$PAGER_NOTIFY')), 'Space separated list of emails to send notifications to.')
                }
                environmentVariables {
                    env('PREFECT_VAULT_KV_PATH', allVars.get('PREFECT_VAULT_KV_PATH'))
                    env('PREFECT_VAULT_KV_VERSION', allVars.get('PREFECT_VAULT_KV_VERSION'))
                }
                multiscm secure_scm(allVars) << {
                    git {
                        remote {
                            url('$WAREHOUSE_TRANSFORMS_URL')
                            branch('$WAREHOUSE_TRANSFORMS_BRANCH')
                            credentials('1')
                        }
                        extensions {
                            relativeTargetDirectory('warehouse-transforms')
                            pruneBranches()
                            cleanAfterCheckout()
                        }
                    }
                    git {
                        remote {
                            url('$PREFECT_FLOWS_URL')
                            branch('$PREFECT_FLOWS_BRANCH')
                        }
                        extensions {
                            relativeTargetDirectory('prefect-flows')
                            pruneBranches()
                            cleanAfterCheckout()
                        }
                    }
                }
                triggers common_triggers(allVars, env_config)
                wrappers common_wrappers(allVars)
                wrappers {
                    colorizeOutput('xterm')
                    timestamps()
                    credentialsBinding {
                        usernamePassword('ANALYTICS_VAULT_ROLE_ID', 'ANALYTICS_VAULT_SECRET_ID', 'analytics-vault')
                    }
                }
                publishers common_publishers(allVars)
                steps {
                    shell(dslFactory.readFileFromWorkspace('dataeng/resources/model-transfers.sh'))
                    shell(dslFactory.readFileFromWorkspace('dataeng/resources/trigger-prefect-flow.sh'))
                }
            }
        }
    }
}
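
For orientation, the closure above expects allVars to carry an ENVIRONMENTS map keyed by environment name, with per-environment values overriding top-level defaults through the env_config.get(key, allVars.get(key)) pattern. A minimal sketch of that shape, using illustrative assumed names and values rather than the repo's actual seed configuration:

// Hypothetical seed configuration; the keys mirror the lookups in the job
// closure above, but every name and value here is an illustrative assumption.
def allVars = [
    'WAREHOUSE_TRANSFORMS_URL'   : 'git@github.com:edx/warehouse-transforms.git',
    'WAREHOUSE_TRANSFORMS_BRANCH': 'master',
    'DBT_PROJECT'                : 'reporting',
    'DBT_PROFILE'                : 'reporting',
    'NOTIFY'                     : '$PAGER_NOTIFY',
    'ENVIRONMENTS'               : [
        'prod': [
            // Per-environment keys take precedence over the defaults above.
            'DBT_TARGET'        : 'prod',
            'MODELS_TO_TRANSFER': 'example_model',
        ],
    ],
]
// One job is generated per environment key, e.g. transfer-dbt-models-prod.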

2 changes: 2 additions & 0 deletions dataeng/jobs/createJobsNew.groovy
@@ -4,6 +4,7 @@ import static analytics.DBTSourceFreshness.job as DBTSourceFreshnessJob
import static analytics.DeployCluster.job as DeployClusterJob
import static analytics.EmrCostReporter.job as EmrCostReporterJob
import static analytics.ModelTransfers.job as ModelTransfersJob
import static analytics.ModelTransfersJenkinsTest.job as ModelTransfersJenkinsTestJob
import static analytics.RetirementJobEdxTriggers.job as RetirementJobEdxTriggersJob
import static analytics.RetirementJobs.job as RetirementJobsJob
import static analytics.SnowflakeCollectMetrics.job as SnowflakeCollectMetricsJob
@@ -46,6 +47,7 @@ def taskMap = [
DEPLOY_CLUSTER_JOB: DeployClusterJob,
EMR_COST_REPORTER_JOB: EmrCostReporterJob,
MODEL_TRANSFERS_JOB: ModelTransfersJob,
MODEL_TRANSFERS_TEST_JOB: ModelTransfersJenkinsTestJob,
RETIREMENT_JOB_EDX_TRIGGERS_JOB: RetirementJobEdxTriggersJob,
RETIREMENT_JOBS_JOB: RetirementJobsJob,
SNOWFLAKE_COLLECT_METRICS_JOB: SnowflakeCollectMetricsJob,
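
The seed wiring above only registers the new closure; a rough sketch of how a taskMap like this is typically dispatched (the actual loop in createJobsNew.groovy sits outside this diff, and resolveExtraVars below is a hypothetical stand-in for however the seed job loads per-job configuration):

// Assumed dispatch pattern, not shown in this diff: each taskMap value is a
// closure taking (dslFactory, allVars) and is invoked once per selected job.
taskMap.each { name, jobClosure ->
    def extraVars = resolveExtraVars(name)  // hypothetical config lookup
    jobClosure(this, extraVars)
}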
37 changes: 37 additions & 0 deletions dataeng/resources/trigger-prefect-flow.sh
@@ -0,0 +1,37 @@
#!/usr/bin/env bash
set -ex

# Create a python3.8 virtualenv
PYTHON_VENV="python_venv"
virtualenv --python=python3.8 --clear "${PYTHON_VENV}"
source "${PYTHON_VENV}/bin/activate"

# Install the prefect-flows python requirements
cd "$WORKSPACE/prefect-flows"
pip install -r requirements.txt

# Do not print commands in this function since they may contain secrets.
set +x

# Retrieve a vault token corresponding to the jenkins AppRole. The token is then stored in the VAULT_TOKEN variable
# which is implicitly used by subsequent vault commands within this script.
# Instructions followed: https://learn.hashicorp.com/tutorials/vault/approle#step-4-login-with-roleid-secretid
export VAULT_TOKEN=$(vault write -field=token auth/approle/login \
    role_id=${ANALYTICS_VAULT_ROLE_ID} \
    secret_id=${ANALYTICS_VAULT_SECRET_ID}
)

PREFECT_CLOUD_AGENT_TOKEN=$(
    vault kv get \
        -version=${PREFECT_VAULT_KV_VERSION} \
        -field=PREFECT_CLOUD_AGENT_TOKEN \
        ${PREFECT_VAULT_KV_PATH}
)

# Authenticate with Prefect Cloud
prefect auth login --key "$PREFECT_CLOUD_AGENT_TOKEN"

set -x

# Trigger prefect flow
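# NOTE: ENTERPRISE_PREFECT_FLOW_ID is not defined anywhere in this change; it is
# presumably supplied by the job's parameters or injected environment.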
prefect run --id "$ENTERPRISE_PREFECT_FLOW_ID"