Commit f30d7d3

Merge branch 'master' into issues/SPARK-16167

2 parents: be6ed48 + e6bef7d
File tree

1,375 files changed: +41,049 / -20,153 lines

.gitignore

Lines changed: 6 additions & 0 deletions
@@ -17,6 +17,7 @@
 .idea/
 .idea_modules/
 .project
+.pydevproject
 .scala_dependencies
 .settings
 /lib/
@@ -77,3 +78,8 @@ spark-warehouse/
 # For R session data
 .RData
 .RHistory
+.Rhistory
+*.Rproj
+*.Rproj.*
+
+.Rproj.user

R/check-cran.sh

Lines changed: 52 additions & 0 deletions
@@ -0,0 +1,52 @@
+#!/bin/bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+set -o pipefail
+set -e
+
+FWDIR="$(cd `dirname $0`; pwd)"
+pushd $FWDIR > /dev/null
+
+if [ ! -z "$R_HOME" ]
+  then
+    R_SCRIPT_PATH="$R_HOME/bin"
+  else
+    # if system wide R_HOME is not found, then exit
+    if [ ! `command -v R` ]; then
+      echo "Cannot find 'R_HOME'. Please specify 'R_HOME' or make sure R is properly installed."
+      exit 1
+    fi
+    R_SCRIPT_PATH="$(dirname $(which R))"
+fi
+echo "USING R_HOME = $R_HOME"
+
+# Build the latest docs
+$FWDIR/create-docs.sh
+
+# Build a zip file containing the source package
+"$R_SCRIPT_PATH/"R CMD build $FWDIR/pkg
+
+# Run check as-cran.
+# TODO(shivaram): Remove the skip tests once we figure out the install mechanism
+
+VERSION=`grep Version $FWDIR/pkg/DESCRIPTION | awk '{print $NF}'`
+
+"$R_SCRIPT_PATH/"R CMD check --as-cran SparkR_"$VERSION".tar.gz
+
+popd > /dev/null
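
Taken together, the script builds the SparkR docs, packages pkg/ into SparkR_<Version>.tar.gz, and runs R CMD check --as-cran against that tarball. A minimal sketch of driving it from an R session; the invocation below is an assumption for illustration, not part of the commit:

    # Hypothetical invocation; assumes a Spark source checkout as the working
    # directory and R on the PATH (set R_HOME beforehand if it is not).
    system2("R/check-cran.sh")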

R/pkg/.Rbuildignore

Lines changed: 5 additions & 0 deletions
@@ -0,0 +1,5 @@
+^.*\.Rproj$
+^\.Rproj\.user$
+^\.lintr$
+^src-native$
+^html$
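
These entries are Perl-style regular expressions that R CMD build matches against file paths relative to the package root, so RStudio project files, the lintr configuration, native sources, and generated HTML docs stay out of the SparkR source tarball that check-cran.sh builds.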

R/pkg/DESCRIPTION

Lines changed: 5 additions & 6 deletions
@@ -1,20 +1,18 @@
 Package: SparkR
 Type: Package
-Title: R frontend for Spark
+Title: R Frontend for Apache Spark
 Version: 2.0.0
-Date: 2013-09-09
+Date: 2016-07-07
 Author: The Apache Software Foundation
 Maintainer: Shivaram Venkataraman <[email protected]>
-Imports:
-    methods
 Depends:
     R (>= 3.0),
-    methods,
+    methods
 Suggests:
     testthat,
     e1071,
     survival
-Description: R frontend for Spark
+Description: The SparkR package provides an R frontend for Apache Spark.
 License: Apache License (== 2.0)
 Collate:
     'schema.R'
@@ -33,6 +31,7 @@ Collate:
     'context.R'
     'deserialize.R'
     'functions.R'
+    'install.R'
     'mllib.R'
     'serialize.R'
     'sparkR.R'
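
Aside from the metadata refresh, the dependency change removes a duplicate: methods was previously listed under both Imports: and Depends:, which R CMD check (now run with --as-cran by check-cran.sh) flags as a package listed in more than one dependency field. It is now declared once, under Depends:.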

R/pkg/NAMESPACE

Lines changed: 19 additions & 3 deletions
@@ -10,6 +10,7 @@ export("sparkR.session")
 export("sparkR.init")
 export("sparkR.stop")
 export("sparkR.session.stop")
+export("sparkR.conf")
 export("print.jobj")
 
 export("sparkRSQL.init",
@@ -23,7 +24,9 @@ exportMethods("glm",
               "spark.kmeans",
               "fitted",
               "spark.naiveBayes",
-              "spark.survreg")
+              "spark.survreg",
+              "spark.isoreg",
+              "spark.gaussianMixture")
 
 # Job group lifecycle management methods
 export("setJobGroup",
@@ -68,6 +71,7 @@ exportMethods("arrange",
               "first",
               "freqItems",
               "gapply",
+              "gapplyCollect",
               "group_by",
               "groupBy",
               "head",
@@ -233,6 +237,7 @@ exportMethods("%in%",
               "over",
               "percent_rank",
               "pmod",
+              "posexplode",
               "quarter",
               "rand",
               "randn",
@@ -338,5 +343,16 @@ export("partitionBy",
        "rowsBetween",
        "rangeBetween")
 
-export("window.partitionBy",
-       "window.orderBy")
+export("windowPartitionBy",
+       "windowOrderBy")
+
+S3method(print, jobj)
+S3method(print, structField)
+S3method(print, structType)
+S3method(print, summary.GeneralizedLinearRegressionModel)
+S3method(structField, character)
+S3method(structField, jobj)
+S3method(structType, jobj)
+S3method(structType, structField)
+
+export("install.spark")
