# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Build version label shown in the AppVeyor UI.
version: "{build}-{branch}"

# Full git history is not needed for a CI build.
shallow_clone: true

platform: x64
configuration: Debug

# Only build the master branch.
branches:
  only:
    - master

# Only trigger builds for commits touching SparkR-related paths.
only_commits:
  files:
    - appveyor.yml
    - dev/appveyor-install-dependencies.ps1
    - R/
    - sql/core/src/main/scala/org/apache/spark/sql/api/r/
    - core/src/main/scala/org/apache/spark/api/r/
    - mllib/src/main/scala/org/apache/spark/ml/r/
    - core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
    - bin/*.cmd

# Cache the local Maven repository between builds.
cache:
  - C:\Users\appveyor\.m2

install:
  # Install Maven and build dependencies.
  - ps: .\dev\appveyor-install-dependencies.ps1
  # Required packages for R unit tests.
  - cmd: R -e "install.packages(c('knitr', 'rmarkdown', 'e1071', 'survival'), repos='https://cloud.r-project.org/')"
  # Use Arrow R 0.14.1 for now. 0.15.0 does not seem to work for now. See SPARK-29378.
  - cmd: R -e "install.packages(c('assertthat', 'bit64', 'fs', 'purrr', 'R6', 'tidyselect'), repos='https://cloud.r-project.org/')"
  - cmd: R -e "install.packages('https://cran.r-project.org/src/contrib/Archive/arrow/arrow_0.14.1.tar.gz', repos=NULL, type='source')"
  # Here, we use the fixed version of testthat. For more details, please see SPARK-22817.
  # As of devtools 2.1.0, it requires testthat higher than 2.1.1 as a dependency. SparkR test requires testthat 1.0.2.
  # Therefore, we don't use devtools but install it directly from the archive including its dependencies.
  - cmd: R -e "install.packages(c('crayon', 'praise', 'R6'), repos='https://cloud.r-project.org/')"
  - cmd: R -e "install.packages('https://cloud.r-project.org/src/contrib/Archive/testthat/testthat_1.0.2.tar.gz', repos=NULL, type='source')"
  - cmd: R -e "packageVersion('knitr'); packageVersion('rmarkdown'); packageVersion('testthat'); packageVersion('e1071'); packageVersion('survival'); packageVersion('arrow')"

build_script:
  # '-Djna.nosys=true' is required to avoid kernel32.dll load failure.
  # See SPARK-28759.
  - cmd: mvn -DskipTests -Psparkr -Phive -Djna.nosys=true package

environment:
  # Environment variables are consumed as strings by the R tooling, so quote
  # boolean-looking values to avoid YAML implicit-boolean typing.
  NOT_CRAN: "true"
  # See SPARK-27848. Currently installing some dependent packages causes
  # "(converted from warning) unable to identify current timezone 'C':" for an unknown reason.
  # This environment variable works around to test SparkR against a higher version.
  R_REMOTES_NO_ERRORS_FROM_WARNINGS: "true"

test_script:
  # Run the SparkR test suite through spark-submit with a file-based default FS.
  - cmd: .\bin\spark-submit2.cmd --driver-java-options "-Dlog4j.configuration=file:///%CD:\=/%/R/log4j.properties" --conf spark.hadoop.fs.defaultFS="file:///" R\pkg\tests\run-all.R

# Suppress all email notifications.
notifications:
  - provider: Email
    on_build_success: false
    on_build_failure: false
    on_build_status_changed: false