Migrate jobs to new integration tests config
Change-Id: I73330a6524756984b79a63c9983658033bc8b7b4
parent d564f6b2f5
commit 71d84fb953

config/sahara/sahara-test-config-spark.yaml (new file, 53 lines)
@@ -0,0 +1,53 @@
credentials:
  os_username: %OS_USERNAME%
  os_password: %OS_PASSWORD%
  os_tenant: %OS_TENANT_NAME%
  os_auth_url: http://%OPENSTACK_HOST%:5000/v2.0
  sahara_url: http://localhost:8386/v1.1/%TENANT_ID%

network:
  type: %NETWORK%
  private_network: private
  public_network: public

clusters:
  - plugin_name: spark
    plugin_version: 1.0.0
    image: %IMAGE_NAME%
    node_group_templates:
      - name: master
        flavor_id: '20'
        node_processes:
          - master
          - namenode
        auto_security_group: true
      - name: worker
        flavor_id: '20'
        node_processes:
          - datanode
          - slave
        auto_security_group: true
    cluster_template:
      name: spark-1-0-0
      node_group_templates:
        master: 1
        worker: 1
      cluster_configs:
        HDFS:
          dfs.replication: 1
    cluster:
      name: %CLUSTER_NAME%-spark
    scenario:
      - run_jobs
    edp_jobs_flow: spark_edp

edp_jobs_flow:
  spark_edp:
    - type: Spark
      main_lib:
        type: database
        source: etc/edp-examples/edp-spark/spark-example.jar
      configs:
        edp.java.main_class: org.apache.spark.examples.SparkPi
      args:
        - 4
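The %...% tokens in this config are placeholders; the gate scripts further down fill them in with sed before the run (see the insert_scenario_value helper added to functions.sh below). A minimal standalone sketch of that substitution, assuming the matching shell variables are already exported:

#!/bin/bash
# Sketch only: fill the %NAME% placeholders in the scenario config with the
# values of the matching environment variables (same sed as insert_scenario_value).
TESTS_CONFIG_FILE=config/sahara/sahara-test-config-spark.yaml

insert_scenario_value() {
    local value=$1
    sed -i "s/%${value}%/${!value}/g" "$TESTS_CONFIG_FILE"
}

for var in OS_USERNAME OS_PASSWORD OS_TENANT_NAME OPENSTACK_HOST \
           NETWORK CLUSTER_NAME TENANT_ID IMAGE_NAME; do
    insert_scenario_value "$var"
done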
config/sahara/sahara-test-config-vanilla-2.6.yaml (new file, 147 lines)
@@ -0,0 +1,147 @@
credentials:
  os_username: %OS_USERNAME%
  os_password: %OS_PASSWORD%
  os_tenant: %OS_TENANT_NAME%
  os_auth_url: http://%OPENSTACK_HOST%:5000/v2.0
  sahara_url: http://localhost:8386/v1.1/%TENANT_ID%

network:
  type: %NETWORK%
  private_network: private
  public_network: public

clusters:
  - plugin_name: vanilla
    plugin_version: 2.6.0
    image: %IMAGE_NAME%
    node_group_templates:
      - name: worker-dn-nm
        flavor_id: '20'
        node_processes:
          - datanode
          - nodemanager
        volumes_per_node: 2
        volumes_size: 2
        auto_security_group: true
        node_configs:
          &ng_configs
          MapReduce:
            yarn.app.mapreduce.am.resource.mb: 256
            yarn.app.mapreduce.am.command-opts: -Xmx256m
          YARN:
            yarn.scheduler.minimum-allocation-mb: 256
            yarn.scheduler.maximum-allocation-mb: 1024
            yarn.nodemanager.vmem-check-enabled: false
      - name: worker-nm
        flavor_id: '20'
        node_processes:
          - nodemanager
        auto_security_group: true
        node_configs:
          *ng_configs
      - name: worker-dn
        flavor_id: '20'
        node_processes:
          - datanode
        volumes_per_node: 2
        volumes_size: 2
        auto_security_group: true
        node_configs:
          *ng_configs
      - name: master-rm-nn-hvs
        flavor_id: '20'
        node_processes:
          - namenode
          - resourcemanager
          - hiveserver
        auto_security_group: true
        node_configs:
          *ng_configs
      - name: master-oo-hs-sn
        flavor_id: '20'
        node_processes:
          - oozie
          - historyserver
          - secondarynamenode
        auto_security_group: true
        node_configs:
          *ng_configs
    cluster:
      name: %CLUSTER_NAME%-vanilla-v2
    cluster_template:
      name: vanilla-2-6-0
      node_group_templates:
        master-rm-nn-hvs: 1
        master-oo-hs-sn: 1
        worker-dn-nm: 2
        worker-dn: 1
        worker-nm: 1
      cluster_configs:
        HDFS:
          dfs.replication: 1
    scaling:
      - operation: resize
        node_group: worker-dn-nm
        size: 1
      - operation: resize
        node_group: worker-dn
        size: 0
      - operation: resize
        node_group: worker-nm
        size: 0
      - operation: add
        node_group: worker-dn
        size: 1
      - operation: add
        node_group: worker-nm
        size: 1
    edp_jobs_flow: vanilla_flow

edp_jobs_flow:
  vanilla_flow:
    - type: Pig
      input_datasource:
        type: swift
        source: etc/edp-examples/edp-pig/trim-spaces/data/input
      output_datasource:
        type: hdfs
        destination: /user/hadoop/edp-output
      main_lib:
        type: swift
        source: etc/edp-examples/edp-pig/trim-spaces/example.pig
      additional_libs:
        - type: swift
          source: etc/edp-examples/edp-pig/trim-spaces/udf.jar
    - type: MapReduce
      input_datasource:
        type: swift
        source: etc/edp-examples/edp-pig/trim-spaces/data/input
      output_datasource:
        type: hdfs
        destination: /user/hadoop/edp-output
      additional_libs:
        - type: database
          source: etc/edp-examples/edp-mapreduce/edp-mapreduce.jar
      configs:
        mapred.mapper.class: org.apache.oozie.example.SampleMapper
        mapred.reducer.class: org.apache.oozie.example.SampleReducer
    - type: MapReduce.Streaming
      input_datasource:
        type: swift
        source: etc/edp-examples/edp-pig/trim-spaces/data/input
      output_datasource:
        type: hdfs
        destination: /user/hadoop/edp-output
      configs:
        edp.streaming.mapper: /bin/cat
        edp.streaming.reducer: /usr/bin/wc
    - type: Java
      additional_libs:
        - type: database
          source: etc/edp-examples/hadoop2/edp-java/hadoop-mapreduce-examples-2.6.0.jar
      configs:
        edp.java.main_class: org.apache.hadoop.examples.QuasiMonteCarlo
      args:
        - 10
        - 10
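For context, these scenario configs are consumed by the new tox -e scenario invocation wired into run_tests below. A rough sketch of exercising this config by hand, assuming a sahara checkout that provides that scenario tox environment (paths are illustrative, not part of this change):

# Illustrative manual run of the scenario tests against the vanilla 2.6 config.
# Assumes the %...% placeholders have already been substituted (see the sketch above).
TESTS_CONFIG_FILE="$PWD/sahara-ci-config/config/sahara/sahara-test-config-vanilla-2.6.yaml"
cd /tmp/sahara          # sahara checkout location used by the gate script below
tox -e scenario "$TESTS_CONFIG_FILE"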
@@ -29,8 +29,9 @@ projects:
    check:
      - gate-sahara-neutron-direct-vanilla_1-aio
      - gate-sahara-neutron-heat-vanilla_2.4
      - gate-sahara-neutron-heat-vanilla_2.6
      - gate-sahara-neutron-heat-vanilla_2.6-scenario
      - gate-sahara-neutron-direct-spark-aio
      - gate-sahara-neutron-direct-spark-aio-scenario
      - gate-sahara-neutron-direct-transient
      - gate-sahara-neutron-heat-transient
      - gate-sahara-nova-direct-cdh_ubuntu-aio
@@ -44,8 +45,9 @@ projects:
    check:
      - gate-saharaclient-neutron-direct-vanilla_1-aio
      - gate-saharaclient-neutron-heat-vanilla_2.4
      - gate-saharaclient-neutron-heat-vanilla_2.6
      - gate-saharaclient-neutron-heat-vanilla_2.6-scenario
      - gate-saharaclient-neutron-direct-spark-aio
      - gate-saharaclient-neutron-direct-spark-aio-scenario
      - gate-saharaclient-neutron-direct-transient
      - gate-saharaclient-neutron-heat-transient
      - gate-saharaclient-nova-direct-cdh_ubuntu-aio
@@ -71,10 +73,11 @@ projects:
      - dib-neutron-heat-vanilla_2.4-ubuntu
      - dib-neutron-heat-vanilla_2.4-fedora
      - dib-neutron-heat-vanilla_2.4-centos
      - dib-neutron-heat-vanilla_2.6-ubuntu
      - dib-neutron-heat-vanilla_2.6-fedora
      - dib-neutron-heat-vanilla_2.6-centos
      - dib-neutron-heat-vanilla_2.6-ubuntu-scenario
      - dib-neutron-heat-vanilla_2.6-fedora-scenario
      - dib-neutron-heat-vanilla_2.6-centos-scenario
      - dib-nova-direct-spark-aio
      - dib-nova-direct-spark-aio-scenario
      - dib-nova-direct-hdp_1-aio
      - dib-nova-heat-hdp_2
      - dib-nova-direct-cdh-ubuntu-aio
@@ -86,10 +89,11 @@ projects:
      - dib-neutron-heat-vanilla_2.4-ubuntu
      - dib-neutron-heat-vanilla_2.4-fedora
      - dib-neutron-heat-vanilla_2.4-centos
      - dib-neutron-heat-vanilla_2.6-ubuntu
      - dib-neutron-heat-vanilla_2.6-fedora
      - dib-neutron-heat-vanilla_2.6-centos
      - dib-neutron-heat-vanilla_2.6-ubuntu-scenario
      - dib-neutron-heat-vanilla_2.6-fedora-scenario
      - dib-neutron-heat-vanilla_2.6-centos-scenario
      - dib-nova-direct-spark-aio
      - dib-nova-direct-spark-aio-scenario
      - dib-nova-direct-hdp_1-aio
      - dib-nova-heat-hdp_2
      - dib-nova-direct-cdh-ubuntu-aio
@@ -116,14 +120,26 @@ jobs:
    branch: ^(stable/juno)$
  - name: ^dib-neutron-heat-vanilla_2.4-.*$
    branch: ^(stable/juno)$
  - name: gate-sahara-neutron-heat-vanilla_2.6
  - name: gate-sahara-neutron-heat-vanilla_2.6-scenario
    branch: ^(master)$
  - name: gate-saharaclient-neutron-heat-vanilla_2.6
  - name: gate-saharaclient-neutron-heat-vanilla_2.6-scenario
    branch: ^(master)$
  - name: ^dib-neutron-heat-vanilla_2.6-.*$
    branch: ^(master)$
  - name: tempest-sahara-tests
    branch: ^(master)$
  - name: dib-nova-direct-spark-aio
    branch: ^(stable/juno)$
  - name: gate-saharaclient-neutron-direct-spark-aio
    branch: ^(stable/juno)$
  - name: gate-sahara-neutron-direct-spark-aio
    branch: ^(stable/juno)$
  - name: dib-nova-direct-spark-aio-scenario
    branch: ^(master)$
  - name: gate-saharaclient-neutron-direct-spark-aio-scenario
    branch: ^(master)$
  - name: gate-sahara-neutron-direct-spark-aio-scenario
    branch: ^(master)$
# - name: gate-ui-tests
#   voting: false
#   files:
@@ -55,15 +55,53 @@
        - trigger-cleanup
    node: 'trusty-nova'

- job-template:
    name: 'dib-neutron-heat-{plugin}-{os}'
- job:
    name: 'dib-nova-direct-spark-aio-scenario'
    defaults: global
    concurrent: false
    builders:
      - gerrit-git-prep
      - shell: "rm -rf sahara-ci-config\
            \ngit clone https://git.openstack.org/stackforge/sahara-ci-config\
            \nFUNCTION_PATH=sahara-ci-config/slave-scripts/functions.sh bash -x sahara-ci-config/slave-scripts/dib.sh {plugin} {os}"
            \nFUNCTION_PATH=sahara-ci-config/slave-scripts/functions.sh bash -x sahara-ci-config/slave-scripts/dib.sh spark"

    properties:
      - zeromq-event

    publishers:
      - sahara-logs
      - console-log
      - trigger-cleanup
    node: 'trusty-nova'

- job-template:
    name: 'dib-neutron-heat-vanilla_2.4-{os}'
    defaults: global
    concurrent: false
    builders:
      - gerrit-git-prep
      - shell: "rm -rf sahara-ci-config\
            \ngit clone https://git.openstack.org/stackforge/sahara-ci-config\
            \nFUNCTION_PATH=sahara-ci-config/slave-scripts/functions.sh bash -x sahara-ci-config/slave-scripts/dib.sh vanilla_2.4 {os}"

    properties:
      - zeromq-event

    publishers:
      - sahara-logs
      - console-log
      - trigger-cleanup
    node: 'trusty-neutron'

- job-template:
    name: 'dib-neutron-heat-vanilla_2.6-{os}-scenario'
    defaults: global
    concurrent: false
    builders:
      - gerrit-git-prep
      - shell: "rm -rf sahara-ci-config\
            \ngit clone https://git.openstack.org/stackforge/sahara-ci-config\
            \nFUNCTION_PATH=sahara-ci-config/slave-scripts/functions.sh bash -x sahara-ci-config/slave-scripts/dib.sh vanilla_2.6 {os}"

    properties:
      - zeromq-event
@@ -98,8 +136,6 @@
        plugin:
          - spark
          - hdp_1
      - 'dib-neutron-heat-{plugin}-{os}':
          plugin:
            - vanilla_2.4
            - vanilla_2.6

      - 'dib-nova-direct-spark-aio-scenario'
      - 'dib-neutron-heat-vanilla_2.4-{os}'
      - 'dib-neutron-heat-vanilla_2.6-{os}-scenario'
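Each shell builder above is a single JJB string; expanded into the commands a slave actually executes for the new spark scenario image build, it is roughly:

# Expansion of the 'dib-nova-direct-spark-aio-scenario' shell builder above.
rm -rf sahara-ci-config
git clone https://git.openstack.org/stackforge/sahara-ci-config
FUNCTION_PATH=sahara-ci-config/slave-scripts/functions.sh \
    bash -x sahara-ci-config/slave-scripts/dib.sh spark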
@@ -142,8 +142,9 @@
    plugin-neutron:
      - direct-vanilla_1-aio
      - heat-vanilla_2.4
      - heat-vanilla_2.6
      - heat-vanilla_2.6-scenario
      - direct-spark-aio
      - direct-spark-aio-scenario
      - direct-transient
      - heat-transient
    plugin-nova_network:
@@ -107,6 +107,7 @@ HDP_IMAGE=$HOST-sahara-hdp-centos-${GERRIT_CHANGE_NUMBER}-hadoop_1
HDP_TWO_IMAGE=$HOST-sahara-hdp-centos-${GERRIT_CHANGE_NUMBER}-hadoop_2
SPARK_IMAGE=$HOST-sahara-spark-ubuntu-${GERRIT_CHANGE_NUMBER}
CDH_IMAGE=$HOST-${image_type}-cdh-${GERRIT_CHANGE_NUMBER}
TESTS_CONFIG_FILE='sahara/tests/integration/configs/itest.conf'

if [[ "$ENGINE_TYPE" == 'heat' ]]
then
@@ -155,6 +156,7 @@ case $plugin in
            if [ "$image_type" != "ubuntu" ] ; then
                SKIP_EDP_JOB_TYPES=Hive
            fi
            TESTS_CONFIG_FILE="$WORKSPACE/sahara-ci-config/config/sahara/sahara-test-config-vanilla-2.6.yaml"
            ;;
        esac
        ;;
@@ -170,6 +172,7 @@ case $plugin in
        [ "$ZUUL_BRANCH" == "stable/icehouse" ] && echo "Tests for Spark plugin is not implemented in stable/icehouse" && exit 0
        upload_image "spark" "ubuntu" ${SPARK_IMAGE}
        PLUGIN_TYPE=$plugin
        [[ "$JOB_NAME" =~ scenario ]] && TESTS_CONFIG_FILE="$WORKSPACE/sahara-ci-config/config/sahara/sahara-test-config-spark.yaml"
        ;;

    hdp_1)
@@ -231,7 +234,7 @@ start_sahara etc/sahara/sahara.conf
cd /tmp/sahara

CLUSTER_NAME="$HOST-$image_os-$hadoop_version-$BUILD_NUMBER-$ZUUL_CHANGE-$ZUUL_PATCHSET"
write_tests_conf sahara/tests/integration/configs/itest.conf
write_tests_conf

run_tests
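Taken together, the gate-side flow for a scenario job after these hunks becomes roughly the following sketch (JOB_NAME and plugin values are illustrative; write_tests_conf and run_tests come from the functions.sh changes below):

# Illustrative flow for a scenario job after this change.
JOB_NAME=gate-sahara-neutron-direct-spark-aio-scenario
plugin=spark
TESTS_CONFIG_FILE='sahara/tests/integration/configs/itest.conf'   # legacy default
if [[ "$JOB_NAME" =~ scenario ]]; then
    TESTS_CONFIG_FILE="$WORKSPACE/sahara-ci-config/config/sahara/sahara-test-config-spark.yaml"
fi
write_tests_conf    # fills the %...% placeholders in $TESTS_CONFIG_FILE
run_tests           # scenario jobs end up running: tox -e scenario $TESTS_CONFIG_FILE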
@@ -113,7 +113,7 @@ start_sahara() {
        echo "Command 'sahara-db-manage' failed"
        exit 1
    fi
    if [ "$ZUUL_BRANCH" == "master" -a \( "$PLUGIN_TYPE" == "vanilla2" -a "$hadoop_version" == "2-4" -o "$PLUGIN_TYPE" == "hdp2" -o "$PLUGIN_TYPE" == "transient" \) ]; then
    if [ "$ZUUL_BRANCH" == "master" -a \( "$PLUGIN_TYPE" == "vanilla2" -a "$hadoop_version" == "2-6" -o "$PLUGIN_TYPE" == "hdp2" -o "$PLUGIN_TYPE" == "transient" \) -o "$hadoop_version" == "2-4" ]; then
        screen -dmS sahara-api /bin/bash -c "PYTHONUNBUFFERED=1 sahara-api --config-dir $conf_dir -d --log-file logs/sahara-log-api.txt"
        sleep 2
        screen -dmS sahara-engine_1 /bin/bash -c "PYTHONUNBUFFERED=1 sahara-engine --config-dir $conf_dir -d --log-file logs/sahara-log-engine-1.txt"
@@ -130,10 +130,33 @@ start_sahara() {
    fi
}

write_tests_conf() {
    test_conf_path=$1
insert_scenario_value() {
    value=$1
    sed -i "s/%${value}%/${!value}/g" $TESTS_CONFIG_FILE
}

    echo "[COMMON]
write_tests_conf() {
    if [[ "$JOB_NAME" =~ scenario ]]; then
        case $PLUGIN_TYPE in
            vanilla2)
                IMAGE_NAME="$VANILLA_TWO_IMAGE"
                ;;
            spark)
                IMAGE_NAME="$SPARK_IMAGE"
                ;;
        esac
        [ "$USE_NEUTRON" == "true" ] && NETWORK="neutron"
        [ "$USE_NEUTRON" == "false" ] && NETWORK="nova-network"
        insert_scenario_value OS_USERNAME
        insert_scenario_value OS_PASSWORD
        insert_scenario_value OS_TENANT_NAME
        insert_scenario_value OPENSTACK_HOST
        insert_scenario_value NETWORK
        insert_scenario_value CLUSTER_NAME
        insert_scenario_value TENANT_ID
        insert_scenario_value IMAGE_NAME
    else
        echo "[COMMON]
OS_USERNAME = 'ci-user'
OS_PASSWORD = 'nova'
OS_TENANT_NAME = 'ci'
@@ -170,15 +193,15 @@ SKIP_SWIFT_TEST = $SKIP_SWIFT_TEST
SKIP_SCALING_TEST = $SKIP_SCALING_TEST
SKIP_EDP_TEST = $SKIP_EDP_TEST
SKIP_EDP_JOB_TYPES = $SKIP_EDP_JOB_TYPES
" >> $test_conf_path
" >> $TESTS_CONFIG_FILE

    if [ "$PLUGIN_TYPE" == "transient" ]; then
        if [ "$ZUUL_BRANCH" == "master" ]; then
            echo "HADOOP_VERSION = '2.6.0'
" >> $test_conf_path
" >> $TESTS_CONFIG_FILE
        elif [[ "$ZUUL_BRANCH" =~ juno ]]; then
            echo "HADOOP_VERSION = '2.4.1'
" >> $test_conf_path
" >> $TESTS_CONFIG_FILE
        fi
    fi

@@ -190,7 +213,7 @@ if [ "$PLUGIN_TYPE" == "vanilla2" -a \( "$hadoop_version" == "2-4" -o "$hadoop_v
    fi
    echo "HADOOP_VERSION = '${version}'
HADOOP_EXAMPLES_JAR_PATH = '/opt/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-examples-${version}.jar'
" >> $test_conf_path
" >> $TESTS_CONFIG_FILE
fi

echo "[HDP]
@@ -225,7 +248,8 @@ IMAGE_NAME = '$SPARK_IMAGE'
SKIP_ALL_TESTS_FOR_PLUGIN = $SKIP_ALL_TESTS_FOR_PLUGIN
SKIP_EDP_TEST = $SKIP_EDP_TEST
SKIP_SCALING_TEST = $SKIP_SCALING_TEST
" >> $test_conf_path
" >> $TESTS_CONFIG_FILE
fi
}

run_tests() {
@@ -252,8 +276,13 @@ run_tests() {
            STATUS=$?
            ;;
        vanilla2)
            tox -e integration -- vanilla2 --concurrency=1
            STATUS=$?
            if [[ "$JOB_NAME" =~ scenario ]]; then
                tox -e scenario $TESTS_CONFIG_FILE
                STATUS=$?
            else
                tox -e integration -- vanilla2 --concurrency=1
                STATUS=$?
            fi
            ;;
        transient)
            tox -e integration -- transient --concurrency=3
@@ -264,8 +293,13 @@ run_tests() {
            STATUS=$?
            ;;
        spark)
            tox -e integration -- spark --concurrency=1
            STATUS=$?
            if [[ "$JOB_NAME" =~ scenario ]]; then
                tox -e scenario $TESTS_CONFIG_FILE
                STATUS=$?
            else
                tox -e integration -- spark --concurrency=1
                STATUS=$?
            fi
            ;;
    esac
fi
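As a worked example of the insert_scenario_value substitution added above (with a hypothetical value):

# Hypothetical input/output for: insert_scenario_value OS_USERNAME
OS_USERNAME=ci-user
echo "os_username: %OS_USERNAME%" | sed "s/%OS_USERNAME%/${OS_USERNAME}/g"
# prints: os_username: ci-user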
@@ -27,6 +27,7 @@ VANILLA_TWO_IMAGE=ubuntu_vanilla_2.4_latest
VANILLA_TWO_SIX_IMAGE=ubuntu_vanilla_2.6_latest
SPARK_IMAGE=sahara_spark_latest
HEAT_JOB=False
TESTS_CONFIG_FILE='sahara/tests/integration/configs/itest.conf'

if [[ "$ENGINE_TYPE" == 'heat' ]]
then
@@ -58,6 +59,7 @@ case $JOB_TYPE in
            hadoop_version=2-6
            VANILLA_TWO_IMAGE=$VANILLA_TWO_SIX_IMAGE
            [ "$ZUUL_BRANCH" == "stable/icehouse" -o "$ZUUL_BRANCH" == "stable/juno" ] && echo "Vanilla 2.6 plugin is not supported in stable/icehouse and stable/juno" && exit 0
            TESTS_CONFIG_FILE="sahara-ci-config/config/sahara/sahara-test-config-vanilla-2.6.yaml"
        fi
        echo "Vanilla2 detected"
        fi
@@ -92,6 +94,7 @@ case $JOB_TYPE in
        SKIP_EDP_TEST=False
        SKIP_SCALING_TEST=False
        [ "$ZUUL_BRANCH" == "stable/icehouse" ] && echo "Spark plugin is not supported in stable/icehouse" && exit 0
        [[ "$JOB_NAME" =~ scenario ]] && TESTS_CONFIG_FILE="sahara-ci-config/config/sahara/sahara-test-config-spark.yaml"
        echo "Spark detected"
        ;;
esac
@@ -110,7 +113,7 @@ start_sahara etc/sahara/sahara.conf
cd $WORKSPACE

CLUSTER_NAME="$HOST-$hadoop_version-$BUILD_NUMBER-$ZUUL_CHANGE-$ZUUL_PATCHSET"
write_tests_conf $WORKSPACE/sahara/tests/integration/configs/itest.conf
write_tests_conf

run_tests