Bump com.google.protobuf:protobuf-java from 2.5.0 to 3.25.5 in /hbase #13
Workflow file for this run
name: core

on:
  push:
  pull_request:
    branches:
      - master
      - branch-*
    types: [opened, synchronize]

env:
  # Disable keepAlive and pool
  # https://github.com/actions/virtual-environments/issues/1499#issuecomment-689467080
  MAVEN_OPTS: >-
    -Xms1024M -Xmx2048M -XX:MaxMetaspaceSize=1024m -XX:-UseGCOverheadLimit -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn
    -Dhttp.keepAlive=false
    -Dmaven.wagon.http.pool=false
    -Dmaven.wagon.http.retryHandler.count=3
  ZEPPELIN_HELIUM_REGISTRY: helium
  SPARK_PRINT_LAUNCH_COMMAND: "true"
  SPARK_LOCAL_IP: 127.0.0.1
  ZEPPELIN_LOCAL_IP: 127.0.0.1

# Use a bash login shell, because we are using miniconda
defaults:
  run:
    shell: bash -l {0}

jobs:
  test-core-modules:
    runs-on: ubuntu-20.04
    strategy:
      fail-fast: false
      matrix:
        hadoop: [hadoop2, hadoop3]
        python: [3.7, 3.8]
    steps:
      - name: Checkout
        uses: actions/checkout@v2
      - name: Tune Runner VM
        uses: ./.github/actions/tune-runner-vm
      - name: Set up JDK 8
        uses: actions/setup-java@v2
        with:
          distribution: 'adopt'
          java-version: 8
      - name: Cache local Maven repository
        uses: actions/cache@v2
        with:
          path: |
            ~/.m2/repository
            !~/.m2/repository/org/apache/zeppelin/
          key: ${{ runner.os }}-zeppelin-${{ hashFiles('**/pom.xml') }}
          restore-keys: |
            ${{ runner.os }}-zeppelin-
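      # Note: the cache path keeps ~/.m2/repository but excludes the locally built
      # org/apache/zeppelin artifacts, so stale Zeppelin snapshots are never restored;
      # the key is invalidated whenever any pom.xml changes.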
      - name: Setup conda environment with python ${{ matrix.python }} and R
        uses: conda-incubator/setup-miniconda@v2
        with:
          activate-environment: python_3_with_R
          environment-file: testing/env_python_${{ matrix.python }}_with_R.yml
          python-version: ${{ matrix.python }}
          mamba-version: "*"
          channels: conda-forge,defaults
          channel-priority: true
          auto-activate-base: false
          use-mamba: true
      - name: Make IRkernel available to Jupyter
        run: |
          R -e "IRkernel::installspec()"
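      # IRkernel::installspec() registers the R kernel spec with Jupyter, so tests that
      # need an R kernel can find it inside the conda environment created above.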
      - name: install application with some interpreters
        run: mvn install -Pbuild-distr -DskipRat -DskipTests -pl zeppelin-server,zeppelin-web,spark-submit,spark/spark-dependencies,markdown,angular,shell -am -Phelium-dev -Pexamples -P${{ matrix.hadoop }} -B
      - name: install and test plugins
        run: mvn package -DskipRat -pl zeppelin-plugins -amd -B
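      # -amd (--also-make-dependents) pulls in the modules that depend on zeppelin-plugins,
      # so every plugin submodule is built and tested by this single invocation.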
      - name: run tests with ${{ matrix.hadoop }}
        run: mvn verify -Pusing-packaged-distr -DskipRat -pl zeppelin-server,zeppelin-web,spark-submit,spark/spark-dependencies,markdown,angular,shell -am -Phelium-dev -Pexamples -P${{ matrix.hadoop }} -Dtests.to.exclude=**/org/apache/zeppelin/spark/* -DfailIfNoTests=false
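      # Spark tests are excluded here via -Dtests.to.exclude; they are covered by the
      # dedicated Spark jobs further down in this workflow.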
  test-interpreter-modules:
    runs-on: ubuntu-20.04
    env:
      INTERPRETERS: 'beam,hbase,pig,jdbc,file,flink,flink-cmd,ignite,kylin,lens,cassandra,elasticsearch,bigquery,alluxio,scio,livy,groovy,sap,java,geode,neo4j,hazelcastjet,submarine,sparql,mongodb'
    steps:
      - name: Checkout
        uses: actions/checkout@v2
      - name: Tune Runner VM
        uses: ./.github/actions/tune-runner-vm
      - name: Set up JDK 8
        uses: actions/setup-java@v2
        with:
          distribution: 'adopt'
          java-version: 8
      - name: Cache local Maven repository
        uses: actions/cache@v2
        with:
          path: |
            ~/.m2/repository
            !~/.m2/repository/org/apache/zeppelin/
          key: ${{ runner.os }}-zeppelin-${{ hashFiles('**/pom.xml') }}
          restore-keys: |
            ${{ runner.os }}-zeppelin-
      - name: Setup conda environment with python 3.7 and R
        uses: conda-incubator/setup-miniconda@v2
        with:
          activate-environment: python_3_with_R_and_tensorflow
          environment-file: testing/env_python_3_with_R_and_tensorflow.yml
          python-version: 3.7
          mamba-version: "*"
          channels: conda-forge,defaults
          channel-priority: true
          auto-activate-base: false
          use-mamba: true
      - name: Make IRkernel available to Jupyter
        run: |
          R -e "IRkernel::installspec()"
      - name: verify interpreter
        run: mvn verify -DskipRat -am -pl .,zeppelin-interpreter,zeppelin-interpreter-shaded,${INTERPRETERS} -Pscala-2.10 -B
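      # ${INTERPRETERS} expands to the job-level env list defined above; -am also builds
      # the upstream modules each of these interpreters depends on.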
  test-zeppelin-client-integration-test:
    runs-on: ubuntu-20.04
    steps:
      - name: Checkout
        uses: actions/checkout@v2
      - name: Tune Runner VM
        uses: ./.github/actions/tune-runner-vm
      - name: Set up JDK 8
        uses: actions/setup-java@v2
        with:
          distribution: 'adopt'
          java-version: 8
      - name: Cache local Maven repository
        uses: actions/cache@v2
        with:
          path: |
            ~/.m2/repository
            !~/.m2/repository/org/apache/zeppelin/
          key: ${{ runner.os }}-zeppelin-${{ hashFiles('**/pom.xml') }}
          restore-keys: |
            ${{ runner.os }}-zeppelin-
      - name: Setup conda environment with python 3.7 and R
        uses: conda-incubator/setup-miniconda@v2
        with:
          activate-environment: python_3_with_R
          environment-file: testing/env_python_3_with_R.yml
          python-version: 3.7
          mamba-version: "*"
          channels: conda-forge,defaults
          channel-priority: true
          auto-activate-base: false
          use-mamba: true
      - name: Make IRkernel available to Jupyter
        run: |
          R -e "IRkernel::installspec()"
      - name: install environment
        run: |
          mvn install -DskipTests -DskipRat -Pintegration -pl zeppelin-interpreter-integration,zeppelin-web,spark-submit,spark/spark-dependencies,markdown,flink-cmd,flink/flink-scala-2.11,flink/flink-scala-2.12,jdbc,shell -am
          mvn package -DskipRat -pl zeppelin-plugins -amd -DskipTests -B
      - name: run tests
        run: mvn test -DskipRat -pl zeppelin-interpreter-integration -Pintegration -DfailIfNoTests=false -Dtest=ZeppelinClientIntegrationTest,ZeppelinClientWithAuthIntegrationTest,ZSessionIntegrationTest
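      # -Dtest restricts the run to the three Zeppelin client/session integration test
      # classes; -DfailIfNoTests=false keeps modules without matching tests from failing the build.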
  test-flink-and-flink-integration-test:
    runs-on: ubuntu-20.04
    strategy:
      fail-fast: false
      matrix:
        flink: [110, 111, 112, 113, 114]
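        # The matrix values correspond to Flink 1.10 through 1.14; they select the
        # flink-1xx Maven profiles and conda environment files used in the steps below.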
    steps:
      - name: Checkout
        uses: actions/checkout@v2
      - name: Tune Runner VM
        uses: ./.github/actions/tune-runner-vm
      - name: Set up JDK 8
        uses: actions/setup-java@v2
        with:
          distribution: 'adopt'
          java-version: 8
      - name: Cache local Maven repository
        uses: actions/cache@v2
        with:
          path: |
            ~/.m2/repository
            !~/.m2/repository/org/apache/zeppelin/
          key: ${{ runner.os }}-zeppelin-${{ hashFiles('**/pom.xml') }}
          restore-keys: |
            ${{ runner.os }}-zeppelin-
      - name: Setup conda environment with python 3.7 and flink
        uses: conda-incubator/setup-miniconda@v2
        with:
          activate-environment: python_3_with_flink
          environment-file: testing/env_python_3_with_flink_${{ matrix.flink }}.yml
          python-version: 3.7
          mamba-version: "*"
          channels: conda-forge,defaults
          channel-priority: true
          auto-activate-base: false
          use-mamba: true
      - name: install environment
        run: |
          mvn install -DskipTests -DskipRat -am -pl flink/flink-scala-2.11,flink/flink-scala-2.12,flink-cmd,zeppelin-interpreter-integration -Pflink-${{ matrix.flink }} -Pintegration -B
          mvn clean package -pl zeppelin-plugins -amd -DskipTests -B
      - name: run tests
        run: mvn test -DskipRat -pl flink/flink-scala-2.11,flink/flink-scala-2.12,flink-cmd,zeppelin-interpreter-integration -Pflink-${{ matrix.flink }} -Pintegration -DfailIfNoTests=false -B -Dtest=org.apache.zeppelin.flink.*,FlinkIntegrationTest${{ matrix.flink }},ZeppelinFlinkClusterTest${{ matrix.flink }}
  run-spark-integration-test:
    runs-on: ubuntu-20.04
    steps:
      - name: Checkout
        uses: actions/checkout@v2
      - name: Tune Runner VM
        uses: ./.github/actions/tune-runner-vm
      - name: Set up JDK 8
        uses: actions/setup-java@v2
        with:
          distribution: 'adopt'
          java-version: 8
      - name: Cache local Maven repository
        uses: actions/cache@v2
        with:
          path: |
            ~/.m2/repository
            !~/.m2/repository/org/apache/zeppelin/
          key: ${{ runner.os }}-zeppelin-${{ hashFiles('**/pom.xml') }}
          restore-keys: |
            ${{ runner.os }}-zeppelin-
      - name: Setup conda environment with python 3.7 and R
        uses: conda-incubator/setup-miniconda@v2
        with:
          activate-environment: python_3_with_R
          environment-file: testing/env_python_3_with_R.yml
          python-version: 3.7
          mamba-version: "*"
          channels: conda-forge,defaults
          channel-priority: true
          auto-activate-base: false
          use-mamba: true
      - name: Make IRkernel available to Jupyter
        run: |
          R -e "IRkernel::installspec()"
      - name: install environment
        run: |
          mvn install -DskipTests -DskipRat -pl zeppelin-interpreter-integration,zeppelin-web,spark-submit,spark/spark-dependencies,markdown -am -Phadoop2 -Pintegration -B
          mvn clean package -pl zeppelin-plugins -amd -DskipTests -B
      - name: run tests
        run: mvn test -DskipRat -pl zeppelin-interpreter-integration,zeppelin-web,spark-submit,spark/spark-dependencies,markdown -am -Phadoop2 -Pintegration -B -Dtest=ZeppelinSparkClusterTest24,SparkSubmitIntegrationTest,SparkIntegrationTest24,ZeppelinSparkClusterTest23,SparkIntegrationTest23,ZeppelinSparkClusterTest22,SparkIntegrationTest22,ZeppelinSparkClusterTest30,ZeppelinSparkClusterTest31,SparkIntegrationTest30,SparkIntegrationTest31,ZeppelinSparkClusterTest32,SparkIntegrationTest32 -DfailIfNoTests=false
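      # The -Dtest list pins the cluster and integration test classes for Spark 2.2
      # through 3.2, so every supported Spark line is exercised in this one job.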
  jdbcIntegrationTest-and-unit-test-of-Spark-2-4-with-Scala-2-11:
    runs-on: ubuntu-20.04
    steps:
      # user/password => root/root
      - name: Start mysql
        run: sudo systemctl start mysql.service
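      # The ubuntu-20.04 runner image ships with a MySQL server preinstalled (root/root,
      # per the note above); it only needs to be started before the JDBC tests run.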
      - name: Checkout
        uses: actions/checkout@v2
      - name: Tune Runner VM
        uses: ./.github/actions/tune-runner-vm
      - name: Set up JDK 8
        uses: actions/setup-java@v2
        with:
          distribution: 'adopt'
          java-version: 8
      - name: Cache local Maven repository
        uses: actions/cache@v2
        with:
          path: |
            ~/.m2/repository
            !~/.m2/repository/org/apache/zeppelin/
          key: ${{ runner.os }}-zeppelin-${{ hashFiles('**/pom.xml') }}
          restore-keys: |
            ${{ runner.os }}-zeppelin-
      - name: Setup conda environment with python 3.7 and R
        uses: conda-incubator/setup-miniconda@v2
        with:
          activate-environment: python_3_with_R
          environment-file: testing/env_python_3_with_R.yml
          python-version: 3.7
          mamba-version: "*"
          channels: conda-forge,defaults
          channel-priority: true
          auto-activate-base: false
          use-mamba: true
      - name: Make IRkernel available to Jupyter
        run: |
          R -e "IRkernel::installspec()"
      - name: install environment
        run: |
          mvn install -DskipTests -DskipRat -pl zeppelin-interpreter-integration,jdbc,zeppelin-web,spark-submit,spark/spark-dependencies,markdown -am -Pspark-2.4 -Pspark-scala-2.11 -Phadoop2 -Pintegration -B
          mvn clean package -pl zeppelin-plugins -amd -DskipTests -B
      - name: run tests
        run: mvn test -DskipRat -pl zeppelin-interpreter-integration,jdbc,zeppelin-web,spark-submit,spark/spark-dependencies,markdown -am -Pspark-2.4 -Pspark-scala-2.11 -Phadoop2 -Pintegration -B -Dtest=JdbcIntegrationTest,org.apache.zeppelin.spark.*,org.apache.zeppelin.kotlin.* -DfailIfNoTests=false
  spark-2-4-and-scala-2-12:
    runs-on: ubuntu-20.04
    steps:
      - name: Checkout
        uses: actions/checkout@v2
      - name: Tune Runner VM
        uses: ./.github/actions/tune-runner-vm
      - name: Set up JDK 8
        uses: actions/setup-java@v2
        with:
          distribution: 'adopt'
          java-version: 8
      - name: Cache local Maven repository
        uses: actions/cache@v2
        with:
          path: |
            ~/.m2/repository
            !~/.m2/repository/org/apache/zeppelin/
          key: ${{ runner.os }}-zeppelin-${{ hashFiles('**/pom.xml') }}
          restore-keys: |
            ${{ runner.os }}-zeppelin-
      - name: Setup conda environment with python 3.7 and R
        uses: conda-incubator/setup-miniconda@v2
        with:
          activate-environment: python_3_with_R
          environment-file: testing/env_python_3_with_R.yml
          python-version: 3.7
          mamba-version: "*"
          channels: conda-forge,defaults
          channel-priority: true
          auto-activate-base: false
          use-mamba: true
      - name: Make IRkernel available to Jupyter
        run: |
          R -e "IRkernel::installspec()"
      - name: install environment
        run: |
          mvn install -DskipTests -DskipRat -pl spark-submit,spark/spark-dependencies -am -Pspark-2.4 -Pspark-scala-2.12 -Phadoop2 -B
      - name: run tests
        run: mvn test -DskipRat -pl spark-submit,spark/spark-dependencies -am -Pspark-2.4 -Pspark-scala-2.12 -Phadoop2 -B -Dtest=org.apache.zeppelin.spark.*,org.apache.zeppelin.kotlin.* -DfailIfNoTests=false
  spark-3-0-and-scala-2-12-and-other-interpreter:
    runs-on: ubuntu-20.04
    strategy:
      fail-fast: false
      matrix:
        python: [ 3.7, 3.8 ]
    steps:
      - name: Checkout
        uses: actions/checkout@v2
      - name: Tune Runner VM
        uses: ./.github/actions/tune-runner-vm
      - name: Set up JDK 8
        uses: actions/setup-java@v2
        with:
          distribution: 'adopt'
          java-version: 8
      - name: Cache local Maven repository
        uses: actions/cache@v2
        with:
          path: |
            ~/.m2/repository
            !~/.m2/repository/org/apache/zeppelin/
          key: ${{ runner.os }}-zeppelin-${{ hashFiles('**/pom.xml') }}
          restore-keys: |
            ${{ runner.os }}-zeppelin-
      - name: Setup conda environment with python ${{ matrix.python }} and R
        uses: conda-incubator/setup-miniconda@v2
        with:
          activate-environment: python_3_with_R
          environment-file: testing/env_python_${{ matrix.python }}_with_R.yml
          python-version: ${{ matrix.python }}
          mamba-version: "*"
          channels: conda-forge,defaults
          channel-priority: true
          auto-activate-base: false
          use-mamba: true
      - name: Make IRkernel available to Jupyter
        run: |
          R -e "IRkernel::installspec()"
      - name: install environment
        run: |
          mvn install -DskipTests -DskipRat -pl spark-submit,spark/spark-dependencies -am -Pspark-3.0 -Pspark-scala-2.12 -Phadoop2 -B
      - name: run tests with ${{ matrix.python }}
        run: mvn test -DskipRat -pl spark-submit,spark/spark-dependencies -am -Pspark-3.0 -Pspark-scala-2.12 -Phadoop2 -B -Dtest=org.apache.zeppelin.spark.*,apache.zeppelin.python.*,apache.zeppelin.jupyter.*,apache.zeppelin.r.* -DfailIfNoTests=false
  spark-3-1-and-scala-2-12-and-other-interpreter:
    runs-on: ubuntu-20.04
    strategy:
      fail-fast: false
      matrix:
        python: [ 3.7, 3.8 ]
    steps:
      - name: Checkout
        uses: actions/checkout@v2
      - name: Tune Runner VM
        uses: ./.github/actions/tune-runner-vm
      - name: Set up JDK 8
        uses: actions/setup-java@v2
        with:
          distribution: 'adopt'
          java-version: 8
      - name: Cache local Maven repository
        uses: actions/cache@v2
        with:
          path: |
            ~/.m2/repository
            !~/.m2/repository/org/apache/zeppelin/
          key: ${{ runner.os }}-zeppelin-${{ hashFiles('**/pom.xml') }}
          restore-keys: |
            ${{ runner.os }}-zeppelin-
      - name: Setup conda environment with python ${{ matrix.python }} and R
        uses: conda-incubator/setup-miniconda@v2
        with:
          activate-environment: python_3_with_R
          environment-file: testing/env_python_${{ matrix.python }}_with_R.yml
          python-version: ${{ matrix.python }}
          mamba-version: "*"
          channels: conda-forge,defaults
          channel-priority: true
          auto-activate-base: false
          use-mamba: true
      - name: Make IRkernel available to Jupyter
        run: |
          R -e "IRkernel::installspec()"
      - name: install environment
        run: mvn install -DskipTests -DskipRat -pl spark-submit,spark/spark-dependencies -am -Pspark-3.1 -Pspark-scala-2.12 -Phadoop2 -B
      - name: run tests with ${{ matrix.python }}
        run: mvn test -DskipRat -pl spark-submit,spark/spark-dependencies -am -Pspark-3.1 -Pspark-scala-2.12 -Phadoop2 -B -Dtest=org.apache.zeppelin.spark.*,apache.zeppelin.python.*,apache.zeppelin.jupyter.*,apache.zeppelin.r.* -DfailIfNoTests=false
  spark-3-2-and-scala-2-12-and-other-interpreter:
    runs-on: ubuntu-20.04
    strategy:
      fail-fast: false
      matrix:
        python: [ 3.7, 3.8 ]
    steps:
      - name: Checkout
        uses: actions/checkout@v2
      - name: Tune Runner VM
        uses: ./.github/actions/tune-runner-vm
      - name: Set up JDK 8
        uses: actions/setup-java@v2
        with:
          distribution: 'adopt'
          java-version: 8
      - name: Cache local Maven repository
        uses: actions/cache@v2
        with:
          path: |
            ~/.m2/repository
            !~/.m2/repository/org/apache/zeppelin/
          key: ${{ runner.os }}-zeppelin-${{ hashFiles('**/pom.xml') }}
          restore-keys: |
            ${{ runner.os }}-zeppelin-
      - name: Setup conda environment with python ${{ matrix.python }} and R
        uses: conda-incubator/setup-miniconda@v2
        with:
          activate-environment: python_3_with_R
          environment-file: testing/env_python_${{ matrix.python }}_with_R.yml
          python-version: ${{ matrix.python }}
          mamba-version: "*"
          channels: conda-forge,defaults
          channel-priority: true
          auto-activate-base: false
          use-mamba: true
      - name: Make IRkernel available to Jupyter
        run: |
          R -e "IRkernel::installspec()"
      - name: install environment
        run: mvn install -DskipTests -DskipRat -pl spark-submit,spark/spark-dependencies -am -Pspark-3.2 -Pspark-scala-2.12 -Phadoop2 -B
      - name: run tests with ${{ matrix.python }}
        run: mvn test -DskipRat -pl spark-submit,spark/spark-dependencies -am -Pspark-3.2 -Pspark-scala-2.12 -Phadoop2 -B -Dtest=org.apache.zeppelin.spark.*,apache.zeppelin.python.*,apache.zeppelin.jupyter.*,apache.zeppelin.r.* -DfailIfNoTests=false
  test-livy-0-5-with-spark-2-2-0-under-python3:
    runs-on: ubuntu-20.04
    steps:
      - name: Checkout
        uses: actions/checkout@v2
      - name: Tune Runner VM
        uses: ./.github/actions/tune-runner-vm
      - name: Set up JDK 8
        uses: actions/setup-java@v2
        with:
          distribution: 'adopt'
          java-version: 8
      - name: Cache local Maven repository
        uses: actions/cache@v2
        with:
          path: |
            ~/.m2/repository
            !~/.m2/repository/org/apache/zeppelin/
          key: ${{ runner.os }}-zeppelin-${{ hashFiles('**/pom.xml') }}
          restore-keys: |
            ${{ runner.os }}-zeppelin-
      - name: Setup conda environment with python 3.7 and R
        uses: conda-incubator/setup-miniconda@v2
        with:
          activate-environment: python_3_with_R
          environment-file: testing/env_python_3_with_R.yml
          python-version: 3.7
          mamba-version: "*"
          channels: conda-forge,defaults
          channel-priority: true
          auto-activate-base: false
          use-mamba: true
      - name: install environment
        run: |
          mvn install -DskipTests -DskipRat -pl livy -am -B
          ./testing/downloadSpark.sh "2.2.0" "2.6"
          ./testing/downloadLivy.sh "0.5.0-incubating"
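      # downloadSpark.sh and downloadLivy.sh fetch the Spark 2.2.0 (Hadoop 2.6) and
      # Livy 0.5.0-incubating distributions that the Livy interpreter tests run against.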
      - name: run tests
        run: mvn verify -DskipRat -pl livy -am -B
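
# A rough local reproduction of the Livy job above (assuming JDK 8, Maven, and conda are
# already installed) would look something like:
#   conda env create -n python_3_with_R -f testing/env_python_3_with_R.yml
#   conda activate python_3_with_R
#   mvn install -DskipTests -DskipRat -pl livy -am -B
#   ./testing/downloadSpark.sh "2.2.0" "2.6"
#   ./testing/downloadLivy.sh "0.5.0-incubating"
#   mvn verify -DskipRat -pl livy -am -B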