Merged
Changes from all commits
263 commits
7c3c5a2
[SPARK-25899][TESTS] Fix flaky CoarseGrainedSchedulerBackendSuite
zsxwing Oct 31, 2018
deb205c
[SPARK-25837][CORE] Fix potential slowdown in AppStatusListener when …
patrickbrownsync Nov 1, 2018
a9532ca
[SPARK-25918][SQL] LOAD DATA LOCAL INPATH should handle a relative path
dongjoon-hyun Nov 2, 2018
ab6432a
[SPARK-25023] Clarify Spark security documentation
Nov 2, 2018
dd527e9
[SPARK-25909] fix documentation on cluster managers
jameslamb Nov 2, 2018
dfe321c
[SPARK-25827][CORE] Avoid converting incoming encrypted blocks to byt…
squito Nov 2, 2018
478cada
[SPARK-25933][DOCUMENTATION] Fix pstats.Stats() reference in configur…
Nov 3, 2018
02028ed
[SPARK-25930][K8S] Fix scala string detection in k8s tests
Nov 5, 2018
d946080
[MINOR] Fix typos and misspellings
srowen Nov 5, 2018
e564ff1
[SPARK-25906][SHELL] Documents '-I' option (from Scala REPL) in spark…
HyukjinKwon Nov 6, 2018
17c7387
[SPARK-22148][SPARK-15815][SCHEDULER] Acquire new executors to avoid …
dhruve Nov 6, 2018
76c02e1
[SPARK-25866][ML] Update KMeans formatVersion
mgaido91 Nov 6, 2018
ccd0fe7
[MINOR] update known_translations
cloud-fan Nov 6, 2018
3c1a4d3
Revert "[SPARK-23831][SQL] Add org.apache.derby to IsolatedClientLoader"
HyukjinKwon Nov 8, 2018
d3a06f9
[SPARK-25904][CORE] Allocate arrays smaller than Int.MaxValue
squito Nov 7, 2018
abce62d
hot fix: add missing import
cloud-fan Nov 8, 2018
9429c02
[MINOR] update HiveExternalCatalogVersionsSuite to test 2.4.0
cloud-fan Nov 8, 2018
033c1db
[SPARK-25988][SQL] Keep names unchanged when deduplicating the column…
gatorsmile Nov 9, 2018
29945da
[SPARK-25979][SQL] Window function: allow parentheses around window r…
gengliangwang Nov 9, 2018
ae5526d
[SPARK-26029][BUILD][2.4] Bump previousSparkVersion in MimaBuild.scal…
cloud-fan Nov 13, 2018
58a12aa
[SPARK-26010][R] fix vignette eval with Java 11
felixcheung Nov 13, 2018
69b7933
[SPARK-25921][PYSPARK] Fix barrier task run without BarrierTaskContex…
xuanyuanking Nov 13, 2018
1ea079c
[SPARK-26042][SS][TESTS] Fix a potential hang in KafkaContinuousSourc…
zsxwing Nov 14, 2018
b65afa6
[SPARK-25778] WriteAheadLogBackedBlockRDD in YARN Cluster Mode Fails …
Nov 14, 2018
4753c10
[SPARK-26057][SQL] Transform also analyzed plans when dedup references
mgaido91 Nov 15, 2018
cc53fe0
[SPARK-26011][SPARK-SUBMIT] Yarn mode pyspark app without python main…
shanyu Nov 15, 2018
1a3251a
[SPARK-25883][BACKPORT][SQL][MINOR] Override method `prettyName` in `…
gengliangwang Nov 16, 2018
61ab872
[SPARK-23207][SQL][FOLLOW-UP] Use `SQLConf.get.enableRadixSort` inste…
ueshin Nov 16, 2018
6488048
[SPARK-25023] More detailed security guidance for K8S
rvesse Nov 16, 2018
7c4b724
[SPARK-25934][MESOS] Don't propagate SPARK_CONF_DIR from spark submit
mpmolek Nov 16, 2018
2314926
[SPARK-26069][TESTS] Fix flaky test: RpcIntegrationSuite.sendRpcWithS…
zsxwing Nov 16, 2018
6e34a20
[SPARK-26092][SS] Use CheckpointFileManager to write the streaming me…
zsxwing Nov 16, 2018
ef32277
[SPARK-26079][SQL] Ensure listener event delivery in StreamingQueryLi…
Nov 17, 2018
5cd1baa
[MINOR][SQL] Fix typo in CTAS plan database string
dongjoon-hyun Nov 17, 2018
9e2832e
[SPARK-26118][WEB UI] Introducing spark.ui.requestHeaderSize for sett…
attilapiros Nov 20, 2018
1017815
[SPARK-26084][SQL] Fixes unresolved AggregateExpression.references ex…
ssimeonov Nov 20, 2018
cf6e27b
[SPARK-26120][TESTS][SS][SPARKR] Fix a streaming query leak in Struct…
zsxwing Nov 21, 2018
134865f
[SPARK-26109][WEBUI] Duration in the task summary metrics table and t…
shahidki31 Nov 21, 2018
a0e8bb0
[SPARK-26021][SQL] replace minus zero with zero in Platform.putDouble…
Nov 23, 2018
9325fe2
[SPARK-24553][UI][FOLLOWUP][2.4 BACKPORT] Fix unnecessary UI redirect
jerryshao Nov 23, 2018
6830f28
[SPARK-26069][TESTS][FOLLOWUP] Add another possible error message
zsxwing Nov 23, 2018
a0bfb00
[SPARK-25786][CORE] If the ByteBuffer.hasArray is false , it will thr…
10110346 Nov 24, 2018
cd562c0
[MINOR][K8S] Invalid property "spark.driver.pod.name" is referenced i…
Leemoonsoo Nov 25, 2018
d75c257
[SPARK-25451][SPARK-26100][CORE] Aggregated metrics table doesn't sho…
shahidki31 Nov 26, 2018
023a8c7
[SPARK-26114][CORE] ExternalSorter's readingIterator field leak
szhem Nov 28, 2018
e8d47df
[SPARK-26147][SQL] only pull out unevaluable python udf from join con…
cloud-fan Nov 28, 2018
0c9b7ae
[SPARK-26137][CORE] Use Java system property "file.separator" inste…
Nov 28, 2018
08b9b67
[SPARK-26211][SQL] Fix InSet for binary, and struct and array with null.
ueshin Nov 29, 2018
6478244
[SPARK-26186][SPARK-26184][CORE] Last updated time is not getting upd…
shahidki31 Nov 29, 2018
ff5afba
[SPARK-26188][SQL] FileIndex: don't infer data types of partition col…
gengliangwang Nov 30, 2018
ea1ae55
[SPARK-26201] Fix python broadcast with encryption
Nov 30, 2018
31447e2
[MINOR][DOC] Correct some document description errors
10110346 Dec 1, 2018
becbf8a
[SPARK-26080][PYTHON] Skips Python resource limit on Windows in Pytho…
HyukjinKwon Dec 2, 2018
f7bf694
[SPARK-26198][SQL] Fix Metadata serialize null values throw NPE
wangyum Dec 2, 2018
540d576
[SPARK-26181][SQL] the `hasMinMaxStats` method of `ColumnStatsMap` is…
adrian-wang Dec 3, 2018
eb45976
[SPARK-26256][K8S] Fix labels for pod deletion
Dec 3, 2018
57ffaf9
[SPARK-26219][CORE][BRANCH-2.4] Executor summary should get updated f…
shahidki31 Dec 3, 2018
765418c
[SPARK-24423][FOLLOW-UP][SQL] Fix error example
wangyum Dec 4, 2018
044f222
[SPARK-26119][CORE][WEBUI] Task summary table should contain only suc…
shahidki31 Dec 4, 2018
f7a2904
[SPARK-26133][ML][FOLLOWUP] Fix doc for OneHotEncoder
viirya Dec 5, 2018
5ceed47
Revert "[SPARK-26133][ML][FOLLOWUP] Fix doc for OneHotEncoder"
HyukjinKwon Dec 5, 2018
da6ec0b
[SPARK-26233][SQL][BACKPORT-2.4] CheckOverflow when encoding a decima…
mgaido91 Dec 5, 2018
395c302
[SPARK-26266][BUILD] Update to Scala 2.12.8 (branch-2.4)
srowen Dec 8, 2018
8307d42
[SPARK-25132][SQL][FOLLOWUP][DOC] Add migration doc for case-insensit…
seancxmao Dec 9, 2018
a79b821
[SPARK-26021][2.4][SQL][FOLLOWUP] only deal with NaN and -0.0 in Unsa…
cloud-fan Dec 9, 2018
2234551
[SPARK-26307][SQL] Fix CTAS when INSERT a partitioned table using Hiv…
gatorsmile Dec 10, 2018
dc299c2
[SPARK-26327][SQL][BACKPORT-2.4] Bug fix for `FileSourceScanExec` met…
xuanyuanking Dec 11, 2018
e92c40b
[SPARK-26265][CORE][BRANCH-2.4] Fix deadlock in BytesToBytesMap.MapIt…
viirya Dec 11, 2018
ac38114
This is a dummy commit to trigger AFS gitbox sync
dongjoon-hyun Dec 11, 2018
21016ec
[SPARK-26355][PYSPARK] Add a workaround for PyArrow 0.11.
ueshin Dec 13, 2018
b36254e
[SPARK-26370][SQL] Fix resolution of higher-order function for the sa…
ueshin Dec 14, 2018
27822ff
[SPARK-26265][CORE][FOLLOWUP] Put freePage into a finally block
viirya Dec 15, 2018
f80e9ff
[SPARK-26315][PYSPARK] auto cast threshold from Integer to Float in a…
Dec 15, 2018
abf8088
[SPARK-26316][SPARK-21052][BRANCH-2.4] Revert hash join metrics in th…
JkSelf Dec 17, 2018
dd35c89
[SPARK-26352][SQL] join reorder should not change the order of output…
rednaxelafx Dec 17, 2018
c012c00
[SPARK-26352][SQL][FOLLOWUP-2.4] Fix missing sameOutput in branch-2.4
rednaxelafx Dec 17, 2018
9cdd34a
[SPARK-26382][CORE] prefix comparator should handle -0.0
cloud-fan Dec 18, 2018
02bad18
[SPARK-26394][CORE] Fix annotation error for Utils.timeStringAsMs
Dec 18, 2018
45d7884
[SPARK-26366][SQL] ReplaceExceptWithFilter should consider NULL as False
mgaido91 Dec 19, 2018
0ed69b3
[SPARK-24687][CORE] Avoid job hanging when generate task binary cause…
caneGuy Dec 20, 2018
3da63d8
[SPARK-26392][YARN] Cancel pending allocate requests by taking locali…
Ngone51 Dec 20, 2018
8325fe4
[SPARK-26409][SQL][TESTS] SQLConf should be serializable in test sess…
gengliangwang Dec 20, 2018
b28de54
[SPARK-26422][R] Support to disable Hive support in SparkR even for H…
HyukjinKwon Dec 21, 2018
ddbef42
[SPARK-26402][SQL] Accessing nested fields with different cases in ca…
dbtsai Dec 22, 2018
3bd3ca3
[SPARK-26426][SQL] fix ExpresionInfo assert error in windows operatio…
Dec 25, 2018
a0b9500
Revert [SPARK-26021][SQL] replace minus zero with zero in Platform.pu…
cloud-fan Dec 27, 2018
9632058
[SPARK-26444][WEBUI] Stage color doesn't change with it's status
seancxmao Dec 28, 2018
6612588
[SPARK-26496][SS][TEST] Avoid to use Random.nextString in StreamingIn…
HyukjinKwon Dec 29, 2018
fbb8ea3
[SPARK-26019][PYSPARK] Allow insecure py4j gateways
squito Jan 3, 2019
cfaf233
[SPARK-26501][CORE][TEST] Fix unexpected overriden of exitFn in Spark…
Jan 3, 2019
5c06e00
[MINOR][NETWORK][TEST] Fix TransportFrameDecoderSuite to use ByteBuf …
dongjoon-hyun Jan 4, 2019
1cc9e2a
[SPARK-26078][SQL][BACKPORT-2.4] Dedup self-join attributes on IN sub…
mgaido91 Jan 4, 2019
cff6429
[SPARK-26537][BUILD] change git-wip-us to gitbox
shaneknapp Jan 5, 2019
aceb5e5
[SPARK-26545] Fix typo in EqualNullSafe's truth table comment
rednaxelafx Jan 5, 2019
2891de4
[SPARK-26554][BUILD] Update `release-util.sh` to avoid GitBox fake 20…
dongjoon-hyun Jan 7, 2019
dfdde8e
[MINOR][BUILD] Fix script name in `release-tag.sh` usage message
dongjoon-hyun Jan 7, 2019
6418dd5
[SPARK-26559][ML][PYSPARK] ML image can't work with numpy versions pr…
viirya Jan 7, 2019
66cdfb7
[SPARK-26269][YARN][BRANCH-2.4] Yarnallocator should have same blackl…
Ngone51 Jan 7, 2019
a4577fc
[SPARK-26267][SS] Retry when detecting incorrect offsets from Kafka (…
zsxwing Jan 8, 2019
c31c110
[SPARK-26554][BUILD][FOLLOWUP] Use GitHub instead of GitBox to check …
dongjoon-hyun Jan 8, 2019
a9275aa
[SPARK-26571][SQL] Update Hive Serde mapping with canonical name of P…
gengliangwang Jan 9, 2019
cce8806
[SPARK-22128][CORE][BUILD] Add `paranamer` dependency to `core` module
dongjoon-hyun Jan 10, 2019
ddad358
[SPARK-26576][SQL] Broadcast hint not applied to partitioned table
jzhuge Jan 11, 2019
9b4d3cd
[SPARK-26551][SQL] Fix schema pruning error when selecting one comple…
viirya Jan 11, 2019
ad02392
[SPARK-26586][SS] Fix race condition that causes streams to run with …
mukulmurthy Jan 11, 2019
9c0e2f4
[SPARK-26607][SQL][TEST] Remove Spark 2.2.x testing from HiveExternal…
dongjoon-hyun Jan 12, 2019
d2ab1db
[SPARK-26538][SQL] Set default precision and scale for elements of po…
a-shkarupin Jan 12, 2019
068b5e9
[MINOR][BUILD] Remove binary license/notice files in a source release…
maropu Jan 15, 2019
cb6ed9f
[SPARK-26615][CORE] Fixing transport server/client resource leaks in …
attilapiros Jan 16, 2019
c4a1f98
[MINOR][PYTHON] Fix SQLContext to SparkSession in Python API main page
HyukjinKwon Jan 16, 2019
2e36e54
[SPARK-25992][PYTHON] Document SparkContext cannot be shared for mult…
HyukjinKwon Jan 16, 2019
a0eabf6
[SPARK-26629][SS] Fixed error with multiple file stream in a query + …
tdas Jan 16, 2019
f3e6736
[SPARK-26633][REPL] Add ExecutorClassLoader.getResourceAsStream
rednaxelafx Jan 16, 2019
9f55238
[SPARK-26638][PYSPARK][ML] Pyspark vector classes always return error…
srowen Jan 17, 2019
2523137
[SPARK-26351][MLLIB] Update doc and minor correction in the mllib eva…
shahidki31 Jan 21, 2019
cf55bcb
[SPARK-26665][CORE] Fix a bug that BlockTransferService.fetchBlockSyn…
zsxwing Jan 22, 2019
0e1d0bb
[SPARK-26605][YARN] Update AM's credentials when creating tokens.
Jan 23, 2019
41d0645
[SPARK-26228][MLLIB] OOM issue encountered when computing Gramian matrix
srowen Jan 23, 2019
55f83da
[SPARK-26706][SQL] Fix `illegalNumericPrecedence` for ByteType
aokolnychyi Jan 24, 2019
efc3421
[SPARK-26682][SQL] Use taskAttemptID instead of attemptNumber for Had…
rdblue Jan 24, 2019
215bbaa
[SPARK-26680][SQL] Eagerly create inputVars while conditions are appr…
bersprockets Jan 24, 2019
6674aa2
[SPARK-26709][SQL] OptimizeMetadataOnlyQuery does not handle empty re…
gengliangwang Jan 25, 2019
5d90583
[SPARK-26427][BUILD][BACKPORT-2.4] Upgrade Apache ORC to 1.5.4
dongjoon-hyun Jan 25, 2019
ff8ec46
[SPARK-26379][SS] Fix issue on adding current_timestamp/current_date …
HeartSaVioR Jan 25, 2019
223d173
[SPARK-26379][SS][FOLLOWUP] Use dummy TimeZoneId to avoid UnresolvedE…
dongjoon-hyun Jan 27, 2019
3d53777
[SPARK-26708][SQL][BRANCH-2.4] Incorrect result caused by inconsisten…
maryannxue Jan 29, 2019
73a9426
[SPARK-26718][SS][BRANCH-2.4] Fixed integer overflow in SS kafka rate…
Jan 29, 2019
306e9bb
[SPARK-26732][CORE][TEST] Wait for listener bus to process events in …
Jan 30, 2019
4360df8
[SPARK-26726] Synchronize the amount of memory used by the broadcast …
httfighter Jan 31, 2019
2a3ece5
[SPARK-26757][GRAPHX] Return 0 for `count` on empty Edge/Vertex RDDs
huonw Jan 31, 2019
b0c607b
[SPARK-26745][SPARK-24959][SQL][BRANCH-2.4] Revert count optimization…
HyukjinKwon Feb 1, 2019
82f6e61
[SPARK-26806][SS] EventTimeStats.merge should handle zeros correctly
zsxwing Feb 1, 2019
9d80735
[SPARK-26677][BUILD] Update Parquet to 1.10.1 with notEq pushdown fix.
rdblue Feb 2, 2019
8a927b3
[SPARK-26751][SQL] Fix memory leak when statement run in background a…
caneGuy Feb 3, 2019
6f42c24
[SPARK-26758][CORE] Idle Executors are not getting killed after spark…
sandeep-katta Feb 5, 2019
e66e84f
[SPARK-26677][FOLLOWUP][BRANCH-2.4] Update Parquet manifest with Hado…
dongjoon-hyun Feb 6, 2019
49bd92f
[SPARK-26734][STREAMING] Fix StackOverflowError with large block queue
rlodge Feb 6, 2019
2b07c1c
[SPARK-26082][MESOS] Fix mesos fetch cache config name
Feb 7, 2019
5767598
[SPARK-26082][MESOS][FOLLOWUP] Add UT on fetcher cache option on Meso…
HeartSaVioR Feb 7, 2019
45f9d5b
Revert "[SPARK-26082][MESOS][FOLLOWUP] Add UT on fetcher cache option…
dongjoon-hyun Feb 9, 2019
0b5815d
[SPARK-26082][MESOS][FOLLOWUP][BRANCH-2.4] Add UT on fetcher cache op…
HeartSaVioR Feb 10, 2019
23155e2
Preparing Spark release v2.4.1-rc1
dbtsai Feb 12, 2019
f394f27
Preparing development version 2.4.2-SNAPSHOT
dbtsai Feb 12, 2019
e0f2118
[MINOR][DOCS] Fix for contradiction in condition formula of keeping i…
Feb 13, 2019
cf981d6
[SPARK-26572][SQL] fix aggregate codegen result evaluation
peter-toth Feb 14, 2019
5e61029
[SPARK-26873][SQL] Use a consistent timestamp to build Hadoop Job IDs.
rdblue Feb 14, 2019
9d4acd1
[SPARK-25922][K8S] Spark Driver/Executor "spark-app-selector" label m…
suxingfate Feb 15, 2019
24bb3af
[SPARK-26864][SQL][BACKPORT-2.4] Query may return incorrect result wh…
dilipbiswal Feb 16, 2019
c83381e
[SPARK-26897][SQL][TEST] Update Spark 2.3.x testing from HiveExternal…
maropu Feb 17, 2019
ed8a186
[SPARK-26897][SQL][TEST][FOLLOW-UP] Remove workaround for 2.2.0 and 2…
maropu Feb 18, 2019
8350d85
[SPARK-26740][SQL][BRANCH-2.4] Read timestamp/date column stats writt…
MaxGekk Feb 19, 2019
181d5c8
[MINOR][DOCS] Fix the update rule in StreamingKMeansModel documentation
joelgenter Feb 19, 2019
2fdb249
Preparing Spark release v2.4.1-rc2
dbtsai Feb 19, 2019
7e67dc9
Preparing development version 2.4.2-SNAPSHOT
dbtsai Feb 19, 2019
79b31ad
[SPARK-26859][SQL] Fix field writer index bug in non-vectorized ORC d…
IvanVergiliev Feb 20, 2019
5da5b53
Preparing Spark release v2.4.1-rc3
dbtsai Feb 21, 2019
d459971
Preparing development version 2.4.2-SNAPSHOT
dbtsai Feb 21, 2019
3830b2f
[R][BACKPORT-2.4] update package description
felixcheung Feb 21, 2019
d63461f
Preparing Spark release v2.4.1-rc4
dbtsai Feb 21, 2019
d49a9b8
Preparing development version 2.4.2-SNAPSHOT
dbtsai Feb 21, 2019
3bc4339
[R][BACKPORT-2.3] update package description
felixcheung Feb 22, 2019
ca2ee72
Revert "[R][BACKPORT-2.3] update package description"
HyukjinKwon Feb 22, 2019
cb9afbf
[SPARK-26950][SQL][TEST] Make RandomDataGenerator use Float.NaN or Do…
dongjoon-hyun Feb 22, 2019
f756f35
Preparing Spark release v2.4.1-rc5
dbtsai Feb 22, 2019
6d41905
Preparing development version 2.4.2-SNAPSHOT
dbtsai Feb 22, 2019
d41180f
[MINOR][BUILD] Update all checkstyle dtd to use "https://checkstyle.org"
HeartSaVioR Feb 25, 2019
da7ae52
[SPARK-26990][SQL][BACKPORT-2.4] FileIndex: use user specified field …
gengliangwang Feb 28, 2019
9c152b2
[SPARK-27046][DSTREAMS] Remove SPARK-19185 related references from do…
gaborgsomogyi Mar 4, 2019
db6c470
[MINOR][DOCS] Clarify that Spark apps should mark Spark as a 'provide…
srowen Mar 5, 2019
168b510
[SPARK-26932][DOC] Add a warning for Hive 2.1.1 ORC reader issue
haiboself Mar 5, 2019
008c17b
[SPARK-24669][SQL] Invalidate tables in case of DROP DATABASE CASCADE
Udbhav30 Mar 6, 2019
890dcd7
[SPARK-23433][SPARK-25250][CORE] Later created TaskSet should learn a…
Ngone51 Mar 6, 2019
6b0734e
[SPARK-27065][CORE] avoid more than one active task set managers for …
cloud-fan Mar 6, 2019
21fe638
[SPARK-27019][SQL][WEBUI] onJobStart happens after onExecutionEnd sho…
shahidki31 Mar 6, 2019
6886b5d
[SPARK-27078][SQL] Fix NoSuchFieldError when read Hive materialized v…
wangyum Mar 7, 2019
08c4494
[SPARK-25863][SPARK-21871][SQL] Check if code size statistics is empt…
maropu Mar 7, 2019
b2a4a28
[SPARK-26604][CORE][BACKPORT-2.4] Clean up channel registration for S…
viirya Mar 8, 2019
c536d99
Preparing Spark release v2.4.1-rc7
dbtsai Mar 8, 2019
03ac617
Preparing development version 2.4.2-SNAPSHOT
dbtsai Mar 8, 2019
d7a7d72
[SPARK-27080][SQL] bug fix: mergeWithMetastoreSchema with uniform low…
Mar 9, 2019
37de7cf
[SPARK-27111][SS] Fix a race that a continuous query may fail with In…
zsxwing Mar 9, 2019
40dbaad
[SPARK-27097][CHERRY-PICK 2.4] Avoid embedding platform-dependent off…
rednaxelafx Mar 10, 2019
ac2b63a
Preparing Spark release v2.4.1-rc8
dbtsai Mar 10, 2019
4d4b271
Preparing development version 2.4.2-SNAPSHOT
dbtsai Mar 10, 2019
56956ae
[SPARK-26927][CORE] Ensure executor is active when processing events …
Mar 12, 2019
a687e3a
[MINOR][CORE] Use https for bintray spark-packages repository
HeartSaVioR Mar 12, 2019
0e27aba
[SPARK-26742][K8S][BRANCH-2.4] Update k8s client version to 4.1.2
Mar 14, 2019
07d64e2
[SPARK-27165][SPARK-27107][BRANCH-2.4][BUILD][SQL] Upgrade Apache ORC…
dongjoon-hyun Mar 15, 2019
b7a0ab3
[SPARK-27134][SQL] array_distinct function does not work correctly wi…
dilipbiswal Mar 16, 2019
0a38155
[SPARK-27178][K8S][BRANCH-2.4] adding nss package to fix tests
shaneknapp Mar 18, 2019
f9bf01b
[SPARK-27112][CORE] : Create a resource ordering between threads to r…
Mar 19, 2019
19cd429
[SPARK-26606][CORE] Handle driver options properly when submitting to…
HeartSaVioR Mar 22, 2019
9e6f70d
[SPARK-27160][SQL] Fix DecimalType when building orc filters
da-liii Mar 20, 2019
bcce6ba
Revert "[SPARK-26606][CORE] Handle driver options properly when submi…
Mar 23, 2019
bad170c
[SPARK-24935][SQL] fix Hive UDAF with two aggregation buffers
Mar 24, 2019
22cb9ad
Revert "Revert "[SPARK-26606][CORE] Handle driver options properly wh…
HeartSaVioR Mar 24, 2019
395ed1e
[SPARK-27094][YARN][BRANCH-2.4] Work around RackResolver swallowing t…
Mar 25, 2019
56eeac8
[SPARK-27198][CORE] Heartbeat interval mismatch in driver and executor
ajithme Mar 25, 2019
4fde57b
[SPARK-27274][DOCS] Fix references to scala 2.11 in 2.4.1+ docs; Note…
srowen Mar 26, 2019
3a00c1d
[SPARK-26961][CORE] Enable parallel classloading capability
ajithme Mar 26, 2019
4bd7546
Preparing Spark release v2.4.1-rc9
dbtsai Mar 26, 2019
84cfde0
Preparing development version 2.4.2-SNAPSHOT
dbtsai Mar 26, 2019
b2c9679
[SPARK-27275][CORE] Fix potential corruption in EncryptedMessage.tran…
zsxwing Mar 28, 2019
a56a587
[SPARK-27244][CORE] Redact Passwords While Using Option logConf=true
Mar 29, 2019
f4382a1
[SPARK-27301][DSTREAM] Shorten the FileSystem cached life cycle to th…
yaooqinn Mar 30, 2019
e792ceb
[SPARK-27267][CORE] Update snappy to avoid error when decompressing e…
srowen Mar 30, 2019
dba2681
[SPARK-27267][FOLLOWUP][BRANCH-2.4] Update hadoop-2.6 dependency mani…
dongjoon-hyun Mar 31, 2019
95365ec
[MINOR][R] fix R project description
felixcheung Mar 31, 2019
e027748
[SPARK-27244][CORE][TEST][FOLLOWUP] toDebugString redacts sensitive i…
gatorsmile Mar 31, 2019
64d7e9c
[SPARK-26998][CORE] Remove SSL configuration from executors
gaborgsomogyi Apr 2, 2019
a680b2b
[SPARK-27346][SQL] Loosen the newline assert condition on 'examples' …
HyukjinKwon Apr 2, 2019
2d83c89
[MINOR][DOC][SQL] Remove out-of-date doc about ORC in DataFrameReader…
viirya Apr 3, 2019
07702ab
[SPARK-27338][CORE] Fix deadlock in UnsafeExternalSorter.SpillableIte…
Apr 4, 2019
793cb7b
[SPARK-27338][CORE][FOLLOWUP] remove trailing space
cloud-fan Apr 4, 2019
3e76241
[SPARK-27382][SQL][TEST] Update Spark 2.4.x testing in HiveExternalCa…
dongjoon-hyun Apr 4, 2019
96c6146
[SPARK-27216][CORE][BACKPORT-2.4] Upgrade RoaringBitmap to 0.7.45 to …
LantaoJin Apr 4, 2019
80b1394
[MINOR][DOC] Fix html tag broken in configuration.md
HeartSaVioR Apr 5, 2019
dcdf835
[SPARK-27358][UI] Update jquery to 1.12.x to pick up security fixes
srowen Apr 5, 2019
38a8cf2
[SPARK-27390][CORE][SQL][TEST] Fix package name mismatch
dongjoon-hyun Apr 5, 2019
aab4f56
[SPARK-27391][SS] Don't initialize a lazy val in ContinuousExecution …
jose-torres Apr 5, 2019
9590c46
[SPARK-27419][CORE] Avoid casting heartbeat interval to seconds (2.4)
zsxwing Apr 10, 2019
caa6c5a
[SPARK-27406][SQL] UnsafeArrayData serialization breaks when two machi…
Apr 10, 2019
fb3263b
[SPARK-27394][WEBUI] Flush LiveEntity if necessary when receiving Spa…
zsxwing Apr 10, 2019
fffc344
Revert "[SPARK-23433][SPARK-25250][CORE] Later created TaskSet should…
cloud-fan Apr 14, 2019
bd5f3aa
[SPARK-27351][SQL] Wrong outputRows estimation after AggregateEstimat…
pengbo Apr 15, 2019
4eabe0c
[SPARK-27453] Pass partitionBy as options in DataFrameWriter
liwensun Apr 16, 2019
7726172
[SPARK-27479][BUILD] Hide API docs for org.apache.spark.util.kvstore
gatorsmile Apr 17, 2019
527fe1d
[SPARK-27403][SQL] Fix `updateTableStats` to update table stats alway…
sujith71955 Apr 11, 2019
b8c95d5
Preparing Spark release v2.4.2-rc1
cloud-fan Apr 18, 2019
726f41c
Preparing development version 2.4.3-SNAPSHOT
cloud-fan Apr 18, 2019
ad0e7df
[MINOR][TEST] Expand spark-submit test to allow python2/3 executable
srowen Apr 18, 2019
7a6d7ca
[SPARK-25079][PYTHON][BRANCH-2.4] update python3 executable to 3.6.x
shaneknapp Apr 19, 2019
150c700
[SPARK-24601][SPARK-27051][BACKPORT][CORE] Update to Jackson 2.9.8
srowen Apr 21, 2019
1a46a84
[SPARK-27496][CORE] Fatal errors should also be sent back to the sender
zsxwing Apr 22, 2019
5e95abf
[SPARK-27419][FOLLOWUP][DOCS] Add note about spark.executor.heartbeat…
srowen Apr 22, 2019
c9cdcf0
[SPARK-27469][BUILD][BRANCH-2.4] Unify commons-beanutils deps to late…
srowen Apr 22, 2019
59b2017
[SPARK-27539][SQL] Fix inaccurate aggregate outputRows estimation wit…
pengbo Apr 23, 2019
2da67de
[SPARK-27544][PYTHON][TEST][BRANCH-2.4] Fix Python test script to wor…
dongjoon-hyun Apr 23, 2019
1379500
[SPARK-27550][TEST][BRANCH-2.4] Fix `test-dependencies.sh` not to use…
dongjoon-hyun Apr 24, 2019
e9d85aa
[MINOR][TEST] switch from 2.4.1 to 2.4.2 in HiveExternalCatalogVersio…
cloud-fan Apr 25, 2019
ae2e6ec
[SPARK-27494][SS] Null values don't work in Kafka source v2
uncleGen Apr 26, 2019
31a1f22
add missing import and fix compilation
cloud-fan Apr 26, 2019
c6c9010
[SPARK-27563][SQL][TEST] automatically get the latest Spark versions …
cloud-fan Apr 26, 2019
8813ddb
[SPARK-26891][BACKPORT-2.4][YARN] Fixing flaky test in YarnSchedulerB…
attilapiros Apr 26, 2019
81d5b34
[SPARK-25535][CORE][BRANCH-2.4] Work around bad error handling in com…
Apr 27, 2019
bb2dc4e
[SPARK-26745][SQL][TESTS] JsonSuite test case: empty line -> 0 record…
Feb 6, 2019
159ba5c
[SPARK-24935][SQL][FOLLOWUP] support INIT -> UPDATE -> MERGE -> FINIS…
cloud-fan Apr 30, 2019
e331c5a
Revert "[SPARK-24601][SPARK-27051][BACKPORT][CORE] Update to Jackson …
gatorsmile Apr 30, 2019
0df1ad7
Preparing Spark release v2.4.3-rc1
Apr 30, 2019
ad8028b
[HOPSWORKS-1081] Upgrade Spark to 2.4.3
kai-chi Jul 23, 2019
1 change: 0 additions & 1 deletion LICENSE-binary
@@ -305,7 +305,6 @@ com.google.code.gson:gson
com.google.inject:guice
com.google.inject.extensions:guice-servlet
com.twitter:parquet-hadoop-bundle
commons-beanutils:commons-beanutils-core
commons-cli:commons-cli
commons-dbcp:commons-dbcp
commons-io:commons-io
10 changes: 5 additions & 5 deletions R/pkg/DESCRIPTION
@@ -1,8 +1,8 @@
Package: SparkR
Type: Package
Version: 2.4.0
Title: R Frontend for Apache Spark
Description: Provides an R Frontend for Apache Spark.
Version: 2.4.3.0
Title: R Front End for 'Apache Spark'
Description: Provides an R Front end for 'Apache Spark' <https://spark.apache.org>.
Authors@R: c(person("Shivaram", "Venkataraman", role = c("aut", "cre"),
email = "[email protected]"),
person("Xiangrui", "Meng", role = "aut",
@@ -11,8 +11,8 @@ Authors@R: c(person("Shivaram", "Venkataraman", role = c("aut", "cre"),
email = "[email protected]"),
person(family = "The Apache Software Foundation", role = c("aut", "cph")))
License: Apache License (== 2.0)
URL: http://www.apache.org/ http://spark.apache.org/
BugReports: http://spark.apache.org/contributing.html
URL: https://www.apache.org/ https://spark.apache.org/
BugReports: https://spark.apache.org/contributing.html
SystemRequirements: Java (== 8)
Depends:
R (>= 3.0),
1 change: 1 addition & 0 deletions R/pkg/tests/fulltests/test_streaming.R
@@ -127,6 +127,7 @@ test_that("Specify a schema by using a DDL-formatted string when reading", {
expect_false(awaitTermination(q, 5 * 1000))
callJMethod(q@ssq, "processAllAvailable")
expect_equal(head(sql("SELECT count(*) FROM people3"))[[1]], 3)
stopQuery(q)

expect_error(read.stream(path = parquetPath, schema = "name stri"),
"DataType stri is not supported.")
14 changes: 14 additions & 0 deletions R/pkg/vignettes/sparkr-vignettes.Rmd
@@ -57,6 +57,20 @@ First, let's load and attach the package.
library(SparkR)
```

```{r, include=FALSE}
# disable eval if java version not supported
override_eval <- tryCatch(!is.numeric(SparkR:::checkJavaVersion()),
error = function(e) { TRUE },
warning = function(e) { TRUE })

if (override_eval) {
opts_hooks$set(eval = function(options) {
options$eval = FALSE
options
})
}
```

`SparkSession` is the entry point into SparkR which connects your R program to a Spark cluster. You can create a `SparkSession` using `sparkR.session` and pass in options such as the application name, any Spark packages depended on, etc.

We use default settings in which it runs in local mode. It auto downloads Spark package in the background if no previous installation is found. For more details about setup, see [Spark Session](#SetupSparkSession).
2 changes: 1 addition & 1 deletion assembly/pom.xml
@@ -21,7 +21,7 @@
<parent>
<groupId>org.apache.spark</groupId>
<artifactId>spark-parent_2.11</artifactId>
<version>2.4.0.1</version>
<version>2.4.3.0</version>
<relativePath>../pom.xml</relativePath>
</parent>

5 changes: 4 additions & 1 deletion bin/spark-shell
@@ -32,7 +32,10 @@ if [ -z "${SPARK_HOME}" ]; then
source "$(dirname "$0")"/find-spark-home
fi

export _SPARK_CMD_USAGE="Usage: ./bin/spark-shell [options]"
export _SPARK_CMD_USAGE="Usage: ./bin/spark-shell [options]

Scala REPL options:
-I <file> preload <file>, enforcing line-by-line interpretation"

# SPARK-4161: scala does not assume use of the java classpath,
# so we need to add the "-Dscala.usejavacp=true" flag manually. We
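The usage text added above documents the Scala REPL's `-I` flag, which preloads a file and interprets it line by line before the interactive prompt appears. A minimal sketch of such a preload file, assuming a hypothetical `init.scala` (its name and contents are illustrative only, not part of this change):

```scala
// init.scala -- hypothetical preload script, run as: ./bin/spark-shell -I init.scala
// The REPL evaluates each line in order before handing over the prompt.
import org.apache.spark.sql.functions.sum

val df = spark.range(0, 10).toDF("id")  // `spark` is the SparkSession the shell provides
df.agg(sum("id")).show()                // prints the sum of 0..9, i.e. 45
```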
8 changes: 7 additions & 1 deletion bin/spark-shell2.cmd
@@ -20,7 +20,13 @@ rem
rem Figure out where the Spark framework is installed
call "%~dp0find-spark-home.cmd"

set _SPARK_CMD_USAGE=Usage: .\bin\spark-shell.cmd [options]
set LF=^


rem two empty lines are required
set _SPARK_CMD_USAGE=Usage: .\bin\spark-shell.cmd [options]^%LF%%LF%^%LF%%LF%^
Scala REPL options:^%LF%%LF%^
-I ^<file^> preload ^<file^>, enforcing line-by-line interpretation

rem SPARK-4161: scala does not assume use of the java classpath,
rem so we need to add the "-Dscala.usejavacp=true" flag manually. We
10 changes: 7 additions & 3 deletions build/mvn
@@ -153,7 +153,7 @@ if [ -n "${ZINC_INSTALL_FLAG}" -o -z "`"${ZINC_BIN}" -status -port ${ZINC_PORT}`
export ZINC_OPTS=${ZINC_OPTS:-"$_COMPILE_JVM_OPTS"}
"${ZINC_BIN}" -shutdown -port ${ZINC_PORT}
"${ZINC_BIN}" -start -port ${ZINC_PORT} \
-server 127.0.0.1 -idle-timeout 30m \
-server 127.0.0.1 -idle-timeout 3h \
-scala-compiler "${SCALA_COMPILER}" \
-scala-library "${SCALA_LIBRARY}" &>/dev/null
fi
@@ -163,8 +163,12 @@ export MAVEN_OPTS=${MAVEN_OPTS:-"$_COMPILE_JVM_OPTS"}

echo "Using \`mvn\` from path: $MVN_BIN" 1>&2

# Last, call the `mvn` command as usual
# call the `mvn` command as usual
# SPARK-25854
"${MVN_BIN}" -DzincPort=${ZINC_PORT} "$@"
MVN_RETCODE=$?

# Try to shut down zinc explicitly
# Try to shut down zinc explicitly if the server is still running.
"${ZINC_BIN}" -shutdown -port ${ZINC_PORT}

exit $MVN_RETCODE
2 changes: 1 addition & 1 deletion common/kvstore/pom.xml
@@ -22,7 +22,7 @@
<parent>
<groupId>org.apache.spark</groupId>
<artifactId>spark-parent_2.11</artifactId>
<version>2.4.0.1</version>
<version>2.4.3.0</version>
<relativePath>../../pom.xml</relativePath>
</parent>

2 changes: 1 addition & 1 deletion common/network-common/pom.xml
@@ -22,7 +22,7 @@
<parent>
<groupId>org.apache.spark</groupId>
<artifactId>spark-parent_2.11</artifactId>
<version>2.4.0.1</version>
<version>2.4.3.0</version>
<relativePath>../../pom.xml</relativePath>
</parent>

common/network-common/src/main/java/org/apache/spark/network/crypto/AuthEngine.java
@@ -159,15 +159,21 @@ public void close() throws IOException {
// accurately report the errors when they happen.
RuntimeException error = null;
byte[] dummy = new byte[8];
try {
doCipherOp(encryptor, dummy, true);
} catch (Exception e) {
error = new RuntimeException(e);
if (encryptor != null) {
try {
doCipherOp(Cipher.ENCRYPT_MODE, dummy, true);
} catch (Exception e) {
error = new RuntimeException(e);
}
encryptor = null;
}
try {
doCipherOp(decryptor, dummy, true);
} catch (Exception e) {
error = new RuntimeException(e);
if (decryptor != null) {
try {
doCipherOp(Cipher.DECRYPT_MODE, dummy, true);
} catch (Exception e) {
error = new RuntimeException(e);
}
decryptor = null;
}
random.close();

@@ -189,11 +195,11 @@ byte[] rawResponse(byte[] challenge) {
}

private byte[] decrypt(byte[] in) throws GeneralSecurityException {
return doCipherOp(decryptor, in, false);
return doCipherOp(Cipher.DECRYPT_MODE, in, false);
}

private byte[] encrypt(byte[] in) throws GeneralSecurityException {
return doCipherOp(encryptor, in, false);
return doCipherOp(Cipher.ENCRYPT_MODE, in, false);
}

private void initializeForAuth(String cipher, byte[] nonce, SecretKeySpec key)
@@ -205,11 +211,13 @@ private void initializeForAuth(String cipher, byte[] nonce, SecretKeySpec key)
byte[] iv = new byte[conf.ivLength()];
System.arraycopy(nonce, 0, iv, 0, Math.min(nonce.length, iv.length));

encryptor = CryptoCipherFactory.getCryptoCipher(cipher, cryptoConf);
encryptor.init(Cipher.ENCRYPT_MODE, key, new IvParameterSpec(iv));
CryptoCipher _encryptor = CryptoCipherFactory.getCryptoCipher(cipher, cryptoConf);
_encryptor.init(Cipher.ENCRYPT_MODE, key, new IvParameterSpec(iv));
this.encryptor = _encryptor;

decryptor = CryptoCipherFactory.getCryptoCipher(cipher, cryptoConf);
decryptor.init(Cipher.DECRYPT_MODE, key, new IvParameterSpec(iv));
CryptoCipher _decryptor = CryptoCipherFactory.getCryptoCipher(cipher, cryptoConf);
_decryptor.init(Cipher.DECRYPT_MODE, key, new IvParameterSpec(iv));
this.decryptor = _decryptor;
}

/**
@@ -241,29 +249,52 @@ private SecretKeySpec generateKey(String kdf, int iterations, byte[] salt, int k
return new SecretKeySpec(key.getEncoded(), conf.keyAlgorithm());
}

private byte[] doCipherOp(CryptoCipher cipher, byte[] in, boolean isFinal)
private byte[] doCipherOp(int mode, byte[] in, boolean isFinal)
throws GeneralSecurityException {

Preconditions.checkState(cipher != null);
CryptoCipher cipher;
switch (mode) {
case Cipher.ENCRYPT_MODE:
cipher = encryptor;
break;
case Cipher.DECRYPT_MODE:
cipher = decryptor;
break;
default:
throw new IllegalArgumentException(String.valueOf(mode));
}

int scale = 1;
while (true) {
int size = in.length * scale;
byte[] buffer = new byte[size];
try {
int outSize = isFinal ? cipher.doFinal(in, 0, in.length, buffer, 0)
: cipher.update(in, 0, in.length, buffer, 0);
if (outSize != buffer.length) {
byte[] output = new byte[outSize];
System.arraycopy(buffer, 0, output, 0, output.length);
return output;
} else {
return buffer;
Preconditions.checkState(cipher != null, "Cipher is invalid because of previous error.");

try {
int scale = 1;
while (true) {
int size = in.length * scale;
byte[] buffer = new byte[size];
try {
int outSize = isFinal ? cipher.doFinal(in, 0, in.length, buffer, 0)
: cipher.update(in, 0, in.length, buffer, 0);
if (outSize != buffer.length) {
byte[] output = new byte[outSize];
System.arraycopy(buffer, 0, output, 0, output.length);
return output;
} else {
return buffer;
}
} catch (ShortBufferException e) {
// Try again with a bigger buffer.
scale *= 2;
}
} catch (ShortBufferException e) {
// Try again with a bigger buffer.
scale *= 2;
}
} catch (InternalError ie) {
// SPARK-25535. The commons-cryto library will throw InternalError if something goes wrong,
// and leave bad state behind in the Java wrappers, so it's not safe to use them afterwards.
if (mode == Cipher.ENCRYPT_MODE) {
this.encryptor = null;
} else {
this.decryptor = null;
}
throw ie;
}
}
