@@ -102,9 +102,16 @@ public void testBadRequests() throws IOException {
       // pass
     }
 
-    // no stageAttemptId
+    // wrong number of parts (note that we allow a missing stageAttemptId)
     try {
-      resolver.getBlockData("app0", "exec1", "shuffle_1_1_0");
+      resolver.getBlockData("app0", "exec1", "shuffle_1_1_0_0_0");
+      fail("Should have failed");
+    } catch (RuntimeException e) {
+      assertTrue("Bad error message: " + e, e.getMessage().contains("Unexpected block id format"));
+    }
+
+    try {
+      resolver.getBlockData("app0", "exec1", "shuffle_1_1");
       fail("Should have failed");
     } catch (RuntimeException e) {
       assertTrue("Bad error message: " + e, e.getMessage().contains("Unexpected block id format"));
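For context, here is a minimal sketch of the block id validation these assertions exercise. This is a hypothetical illustration, not the actual resolver code: the real parsing lives in ExternalShuffleBlockResolver.getBlockData, and the helper name below is invented.

// Hypothetical sketch (helper name invented): validation consistent with the
// tests above. A shuffle block id is
// shuffle_<shuffleId>_<stageAttemptId>_<mapId>_<reduceId>, where the
// stageAttemptId may be absent for apps written before SPARK-8029.
private static int[] parseShuffleBlockId(String blockId) {
  String[] parts = blockId.split("_");
  // 4 fields = legacy id without a stageAttemptId, 5 fields = current id
  if (!parts[0].equals("shuffle") || parts.length < 4 || parts.length > 5) {
    throw new IllegalArgumentException("Unexpected block id format: " + blockId);
  }
  int[] ids = new int[parts.length - 1];
  for (int i = 1; i < parts.length; i++) {
    ids[i - 1] = Integer.parseInt(parts[i]); // numeric components only
  }
  return ids; // [shuffleId, (stageAttemptId,) mapId, reduceId]
}

With this shape of check, "shuffle_1_1_0_0_0" (too many parts) and "shuffle_1_1" (too few) are rejected, while both "shuffle_1_1_0_0" and the legacy "shuffle_1_1_0" are accepted, matching the tests above.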
@@ -145,6 +152,54 @@ private void testReadBlockData(ExternalShuffleBlockResolver resolver, String blo
     assertEquals(expected, block0);
   }
 
+  @Test
+  public void supportLegacySortShuffleBlockIds() throws IOException {
+    // In Spark 1.6, the stage attempt ID was added to shuffle block ids (SPARK-8029). However,
+    // during a rolling upgrade, the shuffle service may be restarted with new code but still
+    // need to serve old apps, so we make sure we can still handle the old block ids.
+
+    ExternalShuffleBlockResolver resolver = new ExternalShuffleBlockResolver(conf, null);
+    resolver.registerExecutor("app0", "exec0",
+      dataContext.createExecutorInfo("org.apache.spark.shuffle.sort.SortShuffleManager"));
+
+    dataContext.insertLegacySortShuffleData(2, 1,
+      new byte[][] { "legacy".getBytes(), "block".getBytes() });
+
+    testReadBlockData(resolver, "shuffle_2_1_0", "legacy");
+    testReadBlockData(resolver, "shuffle_2_1_1", "block");
+
+    // verify everything still works when we also register some blocks which do have a
+    // stageAttemptId
+    testSortShuffleBlocks();
+
+    testReadBlockData(resolver, "shuffle_2_1_0", "legacy");
+    testReadBlockData(resolver, "shuffle_2_1_1", "block");
+  }
+
+  @Test
+  public void supportLegacyHashShuffleBlockIds() throws IOException {
+    // In Spark 1.6, the stage attempt ID was added to shuffle block ids (SPARK-8029). However,
+    // during a rolling upgrade, the shuffle service may be restarted with new code but still
+    // need to serve old apps, so we make sure we can still handle the old block ids.
+
+    ExternalShuffleBlockResolver resolver = new ExternalShuffleBlockResolver(conf, null);
+    resolver.registerExecutor("app0", "exec0",
+      dataContext.createExecutorInfo("org.apache.spark.shuffle.hash.HashShuffleManager"));
+
+    dataContext.insertLegacyHashShuffleData(2, 0,
+      new byte[][] { "more legacy".getBytes(), "hash".getBytes() });
+
+    testReadBlockData(resolver, "shuffle_2_0_0", "more legacy");
+    testReadBlockData(resolver, "shuffle_2_0_1", "hash");
+
+    // verify everything still works when we also register some blocks which do have a
+    // stageAttemptId
+    testHashShuffleBlocks();
+
+    testReadBlockData(resolver, "shuffle_2_0_0", "more legacy");
+    testReadBlockData(resolver, "shuffle_2_0_1", "hash");
+  }
+
   @Test
   public void jsonSerializationOfExecutorRegistration() throws IOException {
     ObjectMapper mapper = new ObjectMapper();
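The legacy tests above depend on test helpers (insertLegacySortShuffleData, insertLegacyHashShuffleData) that write shuffle files using the pre-SPARK-8029 naming scheme, i.e. without a stageAttemptId component. A plausible sketch of the sort-shuffle variant, modeled on TestShuffleDataContext.insertSortShuffleData and assuming the usual java.io imports plus a getFile helper that resolves a filename inside the executor's local dirs (both assumptions here):

// Plausible sketch only: writes one legacy-named shuffle_<shuffleId>_<mapId>_0
// .index/.data pair (no stageAttemptId in the filename). The index file holds
// cumulative block offsets as longs; the data file holds the blocks back to back.
public void insertLegacySortShuffleData(int shuffleId, int mapId, byte[][] blocks)
    throws IOException {
  String blockId = "shuffle_" + shuffleId + "_" + mapId + "_0";
  try (DataOutputStream index =
         new DataOutputStream(new FileOutputStream(getFile(blockId + ".index")));
       OutputStream data = new FileOutputStream(getFile(blockId + ".data"))) {
    long offset = 0;
    index.writeLong(offset);
    for (byte[] block : blocks) {
      data.write(block);
      offset += block.length;
      index.writeLong(offset); // offset of the end of this block
    }
  }
}

Under that layout, insertLegacySortShuffleData(2, 1, ...) produces shuffle_2_1_0.index/.data, so the resolver can serve "shuffle_2_1_0" and "shuffle_2_1_1" by slicing the data file at the recorded offsets, which is exactly what the legacy tests assert.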