Commit 333caf2

Code cleanup suggested by Visual Studio (#666)
1 parent: a5f707c

16 files changed: +119 −145 lines changed

examples/Microsoft.Spark.CSharp.Examples/MachineLearning/Sentiment/Program.cs

Lines changed: 1 addition & 2 deletions

@@ -3,7 +3,6 @@
 // See the LICENSE file in the project root for more information.
 
 using System;
-using System.Collections.Generic;
 using Microsoft.ML;
 using Microsoft.ML.Data;
 using Microsoft.Spark.Sql;
@@ -64,7 +63,7 @@ public static bool Sentiment(string text, string modelPath)
 
             ITransformer mlModel = mlContext
                 .Model
-                .Load(modelPath, out var modelInputSchema);
+                .Load(modelPath, out DataViewSchema _);
 
             PredictionEngine<Review, ReviewPrediction> predEngine = mlContext
                 .Model
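The substantive change in this hunk swaps the unused out variable modelInputSchema for a discard. A minimal standalone sketch of the out-discard pattern, using int.TryParse as a stand-in (the class and values below are illustrative, not from the commit):

    using System;

    class OutDiscardSketch
    {
        static void Main()
        {
            // Binding a named out variable that is never read draws an IDE
            // "unused variable" suggestion; an explicitly typed discard states
            // the intent instead (compare: out DataViewSchema _ above).
            if (int.TryParse("42", out int _))
            {
                Console.WriteLine("parsed; the value is intentionally ignored");
            }
        }
    }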

examples/Microsoft.Spark.CSharp.Examples/Sql/Batch/Datasource.cs

Lines changed: 2 additions & 1 deletion

@@ -5,7 +5,6 @@
 using System;
 using System.Collections.Generic;
 using System.IO;
-using System.Security.Policy;
 using Microsoft.Spark.Sql;
 
 namespace Microsoft.Spark.Examples.Sql.Batch
@@ -140,6 +139,8 @@ private void RunBasicDatasourceExample(SparkSession spark, string parquet, strin
                 .Option("header", true)
                 .Load(csv);
 
+            df.PrintSchema();
+
             df = spark.Read().Orc(orc);
 
             df.Write()

src/csharp/Extensions/Microsoft.Spark.Extensions.Hyperspace.E2ETest/Index/IndexConfigTests.cs

Lines changed: 0 additions & 5 deletions

@@ -3,7 +3,6 @@
 // See the LICENSE file in the project root for more information.
 
 using System.Collections.Generic;
-using System.Linq;
 using Microsoft.Spark.E2ETest.Utils;
 using Microsoft.Spark.Extensions.Hyperspace.Index;
 using Xunit;
@@ -16,10 +15,6 @@ namespace Microsoft.Spark.Extensions.Hyperspace.E2ETest.Index
     [Collection(Constants.HyperspaceTestContainerName)]
     public class IndexConfigTests
     {
-        public IndexConfigTests(HyperspaceFixture fixture)
-        {
-        }
-
         /// <summary>
         /// Test the method signatures for IndexConfig and IndexConfigBuilder APIs.
         /// </summary>

src/csharp/Microsoft.Spark.E2ETest/IpcTests/SparkConfTests.cs

Lines changed: 0 additions & 7 deletions

@@ -11,13 +11,6 @@ namespace Microsoft.Spark.E2ETest.IpcTests
     [Collection("Spark E2E Tests")]
     public class SparkConfTests
     {
-        private readonly SparkFixture _fixture;
-
-        public SparkConfTests(SparkFixture fixture)
-        {
-            _fixture = fixture;
-        }
-
         [Fact]
         public void TestSparkConf()
         {

src/csharp/Microsoft.Spark.E2ETest/IpcTests/Sql/SparkSessionTests.cs

Lines changed: 5 additions & 3 deletions

@@ -78,9 +78,11 @@ public void TestCreateDataFrame()
         {
             // Calling CreateDataFrame with schema
             {
-                var data = new List<GenericRow>();
-                data.Add(new GenericRow(new object[] { "Alice", 20, new Date(2020, 1, 1) }));
-                data.Add(new GenericRow(new object[] { "Bob", 30, new Date(2020, 1, 2) }));
+                var data = new List<GenericRow>
+                {
+                    new GenericRow(new object[] { "Alice", 20, new Date(2020, 1, 1) }),
+                    new GenericRow(new object[] { "Bob", 30, new Date(2020, 1, 2) })
+                };
 
                 var schema = new StructType(new List<StructField>()
                 {
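This hunk replaces construct-then-Add with a collection initializer. A minimal sketch of the two shapes, with List<string> standing in for List<GenericRow> (names are illustrative):

    using System.Collections.Generic;

    class CollectionInitializerSketch
    {
        static void Main()
        {
            // Construct, then call Add repeatedly (the original shape).
            var before = new List<string>();
            before.Add("Alice");
            before.Add("Bob");

            // Collection initializer: the same list built in one statement,
            // which is the shape the commit adopts.
            var after = new List<string> { "Alice", "Bob" };

            System.Console.WriteLine(after.Count == before.Count);
        }
    }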

src/csharp/Microsoft.Spark.E2ETest/UdfTests/UdfSimpleTypesTests.cs

Lines changed: 18 additions & 16 deletions

@@ -22,21 +22,23 @@ public class UdfSimpleTypesTests
         public UdfSimpleTypesTests(SparkFixture fixture)
         {
             _spark = fixture.Spark;
-            var data = new List<GenericRow>();
-            data.Add(new GenericRow(
-                new object[]
-                {
-                    null,
-                    new Date(2020, 1, 1),
-                    new Timestamp(2020, 1, 1, 0, 0, 0, 0)
-                }));
-            data.Add(new GenericRow(
-                new object[]
-                {
-                    30,
-                    new Date(2020, 1, 2),
-                    new Timestamp(2020, 1, 2, 15, 30, 30, 123456)
-                }));
+            var data = new List<GenericRow>
+            {
+                new GenericRow(
+                    new object[]
+                    {
+                        null,
+                        new Date(2020, 1, 1),
+                        new Timestamp(2020, 1, 1, 0, 0, 0, 0)
+                    }),
+                new GenericRow(
+                    new object[]
+                    {
+                        30,
+                        new Date(2020, 1, 2),
+                        new Timestamp(2020, 1, 2, 15, 30, 30, 123456)
+                    })
+            };
             var schema = new StructType(new List<StructField>()
             {
                 new StructField("age", new IntegerType()),
@@ -176,7 +178,7 @@ public void TestUdfWithMultipleThreads()
         {
             try
             {
-                void DefineUdf() => Udf<string, string>(str => str);
+                static void DefineUdf() => Udf<string, string>(str => str);
 
                 // Define a UDF in the main thread.
                 Udf<string, string>(str => str);
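The second hunk marks the DefineUdf local function static. A minimal sketch of what the modifier guarantees (names and values are illustrative):

    using System;

    class StaticLocalFunctionSketch
    {
        static void Main()
        {
            string greeting = "hello";

            // A plain local function may silently capture 'greeting'; 'static'
            // forbids capturing anything from the enclosing scope, so accidental
            // captures become compile errors.
            static string Shout(string s) => s.ToUpperInvariant();

            Console.WriteLine(Shout(greeting));
        }
    }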

src/csharp/Microsoft.Spark.UnitTest/CommandSerDeTests.cs

Lines changed: 63 additions & 72 deletions

@@ -8,7 +8,6 @@
 using Apache.Arrow;
 using Microsoft.Data.Analysis;
 using Microsoft.Spark.Sql;
-using Microsoft.Spark.UnitTest.TestUtils;
 using Xunit;
 using static Microsoft.Spark.UnitTest.TestUtils.ArrowTestUtils;
 
@@ -28,22 +27,20 @@ public void TestCommandSerDeForSqlPickling()
                 Utils.CommandSerDe.SerializedMode.Row,
                 Utils.CommandSerDe.SerializedMode.Row);
 
-            using (var ms = new MemoryStream(serializedCommand))
-            {
-                var deserializedWorkerFunction = new PicklingWorkerFunction(
-                    Utils.CommandSerDe.Deserialize<PicklingWorkerFunction.ExecuteDelegate>(
-                        ms,
-                        out Utils.CommandSerDe.SerializedMode serializerMode,
-                        out Utils.CommandSerDe.SerializedMode deserializerMode,
-                        out var runMode));
-
-                Assert.Equal(Utils.CommandSerDe.SerializedMode.Row, serializerMode);
-                Assert.Equal(Utils.CommandSerDe.SerializedMode.Row, deserializerMode);
-                Assert.Equal("N", runMode);
-
-                object result = deserializedWorkerFunction.Func(0, new[] { "spark" }, new[] { 0 });
-                Assert.Equal("hello spark", result);
-            }
+            using var ms = new MemoryStream(serializedCommand);
+            var deserializedWorkerFunction = new PicklingWorkerFunction(
+                Utils.CommandSerDe.Deserialize<PicklingWorkerFunction.ExecuteDelegate>(
+                    ms,
+                    out Utils.CommandSerDe.SerializedMode serializerMode,
+                    out Utils.CommandSerDe.SerializedMode deserializerMode,
+                    out var runMode));
+
+            Assert.Equal(Utils.CommandSerDe.SerializedMode.Row, serializerMode);
+            Assert.Equal(Utils.CommandSerDe.SerializedMode.Row, deserializerMode);
+            Assert.Equal("N", runMode);
+
+            object result = deserializedWorkerFunction.Func(0, new[] { "spark" }, new[] { 0 });
+            Assert.Equal("hello spark", result);
         }
 
         [Fact]
@@ -62,24 +59,22 @@ public void TestCommandSerDeForSqlArrow()
                 Utils.CommandSerDe.SerializedMode.Row,
                 Utils.CommandSerDe.SerializedMode.Row);
 
-            using (var ms = new MemoryStream(serializedCommand))
-            {
-                var deserializedWorkerFunction = new ArrowWorkerFunction(
-                    Utils.CommandSerDe.Deserialize<ArrowWorkerFunction.ExecuteDelegate>(
-                        ms,
-                        out Utils.CommandSerDe.SerializedMode serializerMode,
-                        out Utils.CommandSerDe.SerializedMode deserializerMode,
-                        out var runMode));
-
-                Assert.Equal(Utils.CommandSerDe.SerializedMode.Row, serializerMode);
-                Assert.Equal(Utils.CommandSerDe.SerializedMode.Row, deserializerMode);
-                Assert.Equal("N", runMode);
-
-                IArrowArray input = ToArrowArray(new[] { "spark" });
-                IArrowArray result =
-                    deserializedWorkerFunction.Func(new[] { input }, new[] { 0 });
-                AssertEquals("hello spark", result);
-            }
+            using var ms = new MemoryStream(serializedCommand);
+            var deserializedWorkerFunction = new ArrowWorkerFunction(
+                Utils.CommandSerDe.Deserialize<ArrowWorkerFunction.ExecuteDelegate>(
+                    ms,
+                    out Utils.CommandSerDe.SerializedMode serializerMode,
+                    out Utils.CommandSerDe.SerializedMode deserializerMode,
+                    out var runMode));
+
+            Assert.Equal(Utils.CommandSerDe.SerializedMode.Row, serializerMode);
+            Assert.Equal(Utils.CommandSerDe.SerializedMode.Row, deserializerMode);
+            Assert.Equal("N", runMode);
+
+            IArrowArray input = ToArrowArray(new[] { "spark" });
+            IArrowArray result =
+                deserializedWorkerFunction.Func(new[] { input }, new[] { 0 });
+            AssertEquals("hello spark", result);
         }
 
         [Fact]
@@ -95,26 +90,24 @@ public void TestCommandSerDeForSqlArrowDataFrame()
                 Utils.CommandSerDe.SerializedMode.Row,
                 Utils.CommandSerDe.SerializedMode.Row);
 
-            using (var ms = new MemoryStream(serializedCommand))
-            {
-                var deserializedWorkerFunction = new DataFrameWorkerFunction(
-                    Utils.CommandSerDe.Deserialize<DataFrameWorkerFunction.ExecuteDelegate>(
-                        ms,
-                        out Utils.CommandSerDe.SerializedMode serializerMode,
-                        out Utils.CommandSerDe.SerializedMode deserializerMode,
-                        out var runMode));
-
-                Assert.Equal(Utils.CommandSerDe.SerializedMode.Row, serializerMode);
-                Assert.Equal(Utils.CommandSerDe.SerializedMode.Row, deserializerMode);
-                Assert.Equal("N", runMode);
-
-                var column = (StringArray)ToArrowArray(new[] { "spark" });
-
-                ArrowStringDataFrameColumn ArrowStringDataFrameColumn = ToArrowStringDataFrameColumn(column);
-                DataFrameColumn result =
-                    deserializedWorkerFunction.Func(new[] { ArrowStringDataFrameColumn }, new[] { 0 });
-                ArrowTestUtils.AssertEquals("hello spark", result);
-            }
+            using var ms = new MemoryStream(serializedCommand);
+            var deserializedWorkerFunction = new DataFrameWorkerFunction(
+                Utils.CommandSerDe.Deserialize<DataFrameWorkerFunction.ExecuteDelegate>(
+                    ms,
+                    out Utils.CommandSerDe.SerializedMode serializerMode,
+                    out Utils.CommandSerDe.SerializedMode deserializerMode,
+                    out var runMode));
+
+            Assert.Equal(Utils.CommandSerDe.SerializedMode.Row, serializerMode);
+            Assert.Equal(Utils.CommandSerDe.SerializedMode.Row, deserializerMode);
+            Assert.Equal("N", runMode);
+
+            var column = (StringArray)ToArrowArray(new[] { "spark" });
+
+            ArrowStringDataFrameColumn ArrowStringDataFrameColumn = ToArrowStringDataFrameColumn(column);
+            DataFrameColumn result =
+                deserializedWorkerFunction.Func(new[] { ArrowStringDataFrameColumn }, new[] { 0 });
+            AssertEquals("hello spark", result);
         }
 
         [Fact]
@@ -139,23 +132,21 @@ public void TestCommandSerDeForRDD()
                 Utils.CommandSerDe.SerializedMode.Byte,
                 Utils.CommandSerDe.SerializedMode.Byte);
 
-            using (var ms = new MemoryStream(serializedCommand))
-            {
-                var deserializedWorkerFunction = new RDD.WorkerFunction(
-                    Utils.CommandSerDe.Deserialize<RDD.WorkerFunction.ExecuteDelegate>(
-                        ms,
-                        out Utils.CommandSerDe.SerializedMode serializerMode,
-                        out Utils.CommandSerDe.SerializedMode deserializerMode,
-                        out var runMode));
-
-                Assert.Equal(Utils.CommandSerDe.SerializedMode.Byte, serializerMode);
-                Assert.Equal(Utils.CommandSerDe.SerializedMode.Byte, deserializerMode);
-                Assert.Equal("N", runMode);
-
-                IEnumerable<object> result =
-                    deserializedWorkerFunction.Func(0, new object[] { 1, 2, 3 });
-                Assert.Equal(new[] { 13, 15, 17 }, result.Cast<int>());
-            }
+            using var ms = new MemoryStream(serializedCommand);
+            var deserializedWorkerFunction = new RDD.WorkerFunction(
+                Utils.CommandSerDe.Deserialize<RDD.WorkerFunction.ExecuteDelegate>(
+                    ms,
+                    out Utils.CommandSerDe.SerializedMode serializerMode,
+                    out Utils.CommandSerDe.SerializedMode deserializerMode,
+                    out var runMode));
+
+            Assert.Equal(Utils.CommandSerDe.SerializedMode.Byte, serializerMode);
+            Assert.Equal(Utils.CommandSerDe.SerializedMode.Byte, deserializerMode);
+            Assert.Equal("N", runMode);
+
+            IEnumerable<object> result =
+                deserializedWorkerFunction.Func(0, new object[] { 1, 2, 3 });
+            Assert.Equal(new[] { 13, 15, 17 }, result.Cast<int>());
         }
     }
 }
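Every test in this file collapses a using statement block into a C# 8 using declaration. A minimal sketch of the difference, with MemoryStream chosen to mirror the tests (the class is illustrative):

    using System.IO;

    class UsingDeclarationSketch
    {
        static void Main()
        {
            // using statement: disposal is tied to an explicit block.
            using (var blockScoped = new MemoryStream())
            {
                blockScoped.WriteByte(1);
            }

            // C# 8 using declaration: disposed when the enclosing scope ends,
            // which removes one level of nesting, as in the tests above.
            using var methodScoped = new MemoryStream();
            methodScoped.WriteByte(2);
        }
    }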

src/csharp/Microsoft.Spark.Worker.UnitTest/CommandExecutorTests.cs

Lines changed: 6 additions & 10 deletions

@@ -30,10 +30,9 @@ public class CommandExecutorTests
         [MemberData(nameof(CommandExecutorData.Data), MemberType = typeof(CommandExecutorData))]
         public void TestPicklingSqlCommandExecutorWithSingleCommand(
             Version sparkVersion,
-#pragma warning disable xUnit1026 // Theory methods should use all of their parameters
             IpcOptions ipcOptions)
-#pragma warning restore xUnit1026 // Theory methods should use all of their parameters
         {
+            _ = ipcOptions;
             var udfWrapper = new Sql.PicklingUdfWrapper<string, string>(
                 (str) => "udf: " + ((str is null) ? "NULL" : str));
             var command = new SqlCommand()
@@ -108,10 +107,9 @@ public void TestPicklingSqlCommandExecutorWithSingleCommand(
         [MemberData(nameof(CommandExecutorData.Data), MemberType = typeof(CommandExecutorData))]
         public void TestPicklingSqlCommandExecutorWithMultiCommands(
             Version sparkVersion,
-#pragma warning disable xUnit1026 // Theory methods should use all of their parameters
             IpcOptions ipcOptions)
-#pragma warning restore xUnit1026 // Theory methods should use all of their parameters
         {
+            _ = ipcOptions;
             var udfWrapper1 = new Sql.PicklingUdfWrapper<string, string>((str) => $"udf: {str}");
             var udfWrapper2 = new Sql.PicklingUdfWrapper<int, int, int>(
                 (arg1, arg2) => arg1 * arg2);
@@ -197,10 +195,9 @@ public void TestPicklingSqlCommandExecutorWithMultiCommands(
         [MemberData(nameof(CommandExecutorData.Data), MemberType = typeof(CommandExecutorData))]
         public void TestPicklingSqlCommandExecutorWithEmptyInput(
             Version sparkVersion,
-#pragma warning disable xUnit1026 // Theory methods should use all of their parameters
             IpcOptions ipcOptions)
-#pragma warning restore xUnit1026 // Theory methods should use all of their parameters
         {
+            _ = ipcOptions;
             var udfWrapper = new Sql.PicklingUdfWrapper<string, string>((str) => $"udf: {str}");
             var command = new SqlCommand()
             {
@@ -904,12 +901,12 @@ public async Task TestDataFrameGroupedMapCommandExecutor(
             Version sparkVersion,
             IpcOptions ipcOptions)
         {
-            ArrowStringDataFrameColumn ConvertStrings(ArrowStringDataFrameColumn strings)
+            static ArrowStringDataFrameColumn ConvertStrings(ArrowStringDataFrameColumn strings)
             {
                 return strings.Apply(cur => $"udf: {cur}");
             }
 
-            var resultSchema = new Schema.Builder()
+            Schema resultSchema = new Schema.Builder()
                 .Field(b => b.Name("arg1").DataType(StringType.Default))
                 .Field(b => b.Name("arg2").DataType(Int64Type.Default))
                 .Build();
@@ -1006,10 +1003,9 @@ await arrowWriter.WriteRecordBatchAsync(
 
         [Theory]
         [MemberData(nameof(CommandExecutorData.Data), MemberType = typeof(CommandExecutorData))]
-#pragma warning disable xUnit1026 // Theory methods should use all of their parameters
         public void TestRDDCommandExecutor(Version sparkVersion, IpcOptions ipcOptions)
-#pragma warning restore xUnit1026 // Theory methods should use all of their parameters
         {
+            _ = ipcOptions;
             static int mapUdf(int a) => a + 3;
             var command = new RDDCommand()
             {
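Here the #pragma warning disable/restore pairs for xUnit1026 are replaced by assigning the intentionally unused theory parameter to the discard, which the analyzer counts as a use. A minimal sketch in an isolated test class (names and data are illustrative):

    using Xunit;

    public class DiscardedTheoryParameterSketch
    {
        [Theory]
        [InlineData(1, "unused by the assertions")]
        public void TestUsesOnlyFirstParameter(int value, string extra)
        {
            // Assigning to '_' marks 'extra' as deliberately unused, so
            // xUnit1026 stays quiet without pragma directives.
            _ = extra;

            Assert.Equal(1, value);
        }
    }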

src/csharp/Microsoft.Spark.Worker/Command/RDDCommandExecutor.cs

Lines changed: 3 additions & 5 deletions

@@ -68,7 +68,7 @@ private IEnumerable<object> GetInputIterator(
             RDD.Collector.IDeserializer deserializer =
                 RDD.Collector.GetDeserializer(deserializerMode);
 
-            int messageLength = 0;
+            int messageLength;
             while ((messageLength = SerDe.ReadInt32(inputStream)) !=
                 (int)SpecialLengths.END_OF_DATA_SECTION)
             {
@@ -90,8 +90,7 @@ private void WriteOutput(
             CommandSerDe.SerializedMode serializerMode,
             object message)
         {
-            MemoryStream writeOutputStream = s_writeOutputStream ??
-                (s_writeOutputStream = new MemoryStream());
+            MemoryStream writeOutputStream = s_writeOutputStream ??= new MemoryStream();
             writeOutputStream.Position = 0;
             Serialize(serializerMode, message, writeOutputStream);
             SerDe.Write(stream, (int)writeOutputStream.Position);
@@ -112,8 +111,7 @@ private void Serialize(
             switch (serializerMode)
             {
                 case CommandSerDe.SerializedMode.Byte:
-                    BinaryFormatter formatter = s_binaryFormatter ??
-                        (s_binaryFormatter = new BinaryFormatter());
+                    BinaryFormatter formatter = s_binaryFormatter ??= new BinaryFormatter();
                     formatter.Serialize(stream, message);
                     break;
                 case CommandSerDe.SerializedMode.None:
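Both lazy-initialization sites move from the explicit 'x ?? (x = new ...)' form to the C# 8 null-coalescing assignment operator. A minimal self-contained sketch (field and method names are illustrative):

    using System;
    using System.IO;

    class LazyBufferSketch
    {
        private static MemoryStream s_buffer;

        static MemoryStream GetBuffer()
        {
            // '??=' assigns the right-hand side only when s_buffer is null,
            // replacing the older 's_buffer ?? (s_buffer = new MemoryStream())'.
            return s_buffer ??= new MemoryStream();
        }

        static void Main()
        {
            // The same instance is returned on every call after the first.
            Console.WriteLine(ReferenceEquals(GetBuffer(), GetBuffer()));
        }
    }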
