From 740a37386d8b82759a2443ac62b025b6ed5c9890 Mon Sep 17 00:00:00 2001 From: Maira Wenzel Date: Wed, 20 Nov 2019 12:27:12 -0800 Subject: [PATCH 1/2] fix back end spelling --- README.md | 2 +- deployment/README.md | 2 +- docs/developer-guide.md | 2 +- src/csharp/Microsoft.Spark.E2ETest/SparkFixture.cs | 2 +- src/csharp/Microsoft.Spark/Services/ConfigurationService.cs | 2 +- src/csharp/Microsoft.Spark/Services/IConfigurationService.cs | 2 +- .../scala/org/apache/spark/deploy/dotnet/DotnetRunner.scala | 4 ++-- .../scala/org/apache/spark/deploy/dotnet/DotnetRunner.scala | 4 ++-- 8 files changed, 10 insertions(+), 10 deletions(-) diff --git a/README.md b/README.md index 290991d87..607d1916d 100644 --- a/README.md +++ b/README.md @@ -159,7 +159,7 @@ We welcome contributions! Please review our [contribution guide](CONTRIBUTING.md This project would not have been possible without the outstanding work from the following communities: -- [Apache Spark](https://spark.apache.org/): Unified Analytics Engine for Big Data, the underlying backend execution engine for .NET for Apache Spark +- [Apache Spark](https://spark.apache.org/): Unified Analytics Engine for Big Data, the underlying back-end execution engine for .NET for Apache Spark - [Mobius](https://github.com/Microsoft/Mobius): C# and F# language binding and extensions to Apache Spark, a pre-cursor project to .NET for Apache Spark from the same Microsoft group. - [PySpark](https://spark.apache.org/docs/latest/api/python/index.html): Python bindings for Apache Spark, one of the implementations .NET for Apache Spark derives inspiration from. - [sparkR](https://spark.apache.org/docs/latest/sparkr.html): one of the implementations .NET for Apache Spark derives inspiration from. 
diff --git a/deployment/README.md b/deployment/README.md index d932ace65..67644e8ab 100644 --- a/deployment/README.md +++ b/deployment/README.md @@ -35,7 +35,7 @@ Deploying your App on the Cloud ``` # Preparing Worker Dependencies -Microsoft.Spark.Worker is a backend component that lives on the individual worker nodes of your Spark cluster. When you want to execute a C# UDF (user-defined function), Spark needs to understand how to launch the .NET CLR to execute this UDF. Microsoft.Spark.Worker provides a collection of classes to Spark that enable this functionality. +Microsoft.Spark.Worker is a back-end component that lives on the individual worker nodes of your Spark cluster. When you want to execute a C# UDF (user-defined function), Spark needs to understand how to launch the .NET CLR to execute this UDF. Microsoft.Spark.Worker provides a collection of classes to Spark that enable this functionality. ## Microsoft.Spark.Worker 1. Select a [Microsoft.Spark.Worker](https://github.com/dotnet/spark/releases) Linux netcoreapp release to be deployed on your cluster. diff --git a/docs/developer-guide.md b/docs/developer-guide.md index 188631dcb..bb5b19568 100644 --- a/docs/developer-guide.md +++ b/docs/developer-guide.md @@ -19,7 +19,7 @@ spark-submit \ and you will see the followng output: ``` *********************************************************************** -* .NET Backend running debug mode. Press enter to exit * +* .NET back end running debug mode. Press enter to exit * *********************************************************************** ``` In this debug mode, `DotnetRunner` does not launch the .NET application, but waits for it to connect. Leave this command prompt window open. 
diff --git a/src/csharp/Microsoft.Spark.E2ETest/SparkFixture.cs b/src/csharp/Microsoft.Spark.E2ETest/SparkFixture.cs index 7a47eb954..ba838c5f9 100644 --- a/src/csharp/Microsoft.Spark.E2ETest/SparkFixture.cs +++ b/src/csharp/Microsoft.Spark.E2ETest/SparkFixture.cs @@ -71,7 +71,7 @@ public SparkFixture() // Scala-side driver for .NET emits the following message after it is // launched and ready to accept connections. if (!isSparkReady && - arguments.Data.Contains("Backend running debug mode")) + arguments.Data.Contains("back end running debug mode")) { isSparkReady = true; } diff --git a/src/csharp/Microsoft.Spark/Services/ConfigurationService.cs b/src/csharp/Microsoft.Spark/Services/ConfigurationService.cs index 2475d8ba4..4a697d2ef 100644 --- a/src/csharp/Microsoft.Spark/Services/ConfigurationService.cs +++ b/src/csharp/Microsoft.Spark/Services/ConfigurationService.cs @@ -10,7 +10,7 @@ namespace Microsoft.Spark.Services { /// /// Implementation of configuration service that helps getting config settings - /// to be used in .NET backend. + /// to be used in .NET back end. /// internal sealed class ConfigurationService : IConfigurationService { diff --git a/src/csharp/Microsoft.Spark/Services/IConfigurationService.cs b/src/csharp/Microsoft.Spark/Services/IConfigurationService.cs index 5c7a4074f..bae55c8fa 100644 --- a/src/csharp/Microsoft.Spark/Services/IConfigurationService.cs +++ b/src/csharp/Microsoft.Spark/Services/IConfigurationService.cs @@ -10,7 +10,7 @@ namespace Microsoft.Spark.Services internal interface IConfigurationService { /// - /// The port number used for communicating with the .NET backend process. + /// The port number used for communicating with the .NET back-end process. 
/// int GetBackendPortNumber(); diff --git a/src/scala/microsoft-spark-2.4.x/src/main/scala/org/apache/spark/deploy/dotnet/DotnetRunner.scala b/src/scala/microsoft-spark-2.4.x/src/main/scala/org/apache/spark/deploy/dotnet/DotnetRunner.scala index 68941e648..1c38f6868 100644 --- a/src/scala/microsoft-spark-2.4.x/src/main/scala/org/apache/spark/deploy/dotnet/DotnetRunner.scala +++ b/src/scala/microsoft-spark-2.4.x/src/main/scala/org/apache/spark/deploy/dotnet/DotnetRunner.scala @@ -97,7 +97,7 @@ object DotnetRunner extends Logging { val dotnetBackendThread = new Thread("DotnetBackend") { override def run() { // need to get back dotnetBackendPortNumber because if the value passed to init is 0 - // the port number is dynamically assigned in the backend + // the port number is dynamically assigned in the back end dotnetBackendPortNumber = dotnetBackend.init(dotnetBackendPortNumber) logInfo(s"Port number used by DotnetBackend is $dotnetBackendPortNumber") initialized.release() @@ -147,7 +147,7 @@ object DotnetRunner extends Logging { } else { // scalastyle:off println println("***********************************************************************") - println("* .NET Backend running debug mode. Press enter to exit *") + println("* .NET back end running debug mode. 
Press enter to exit *") println("***********************************************************************") // scalastyle:on println diff --git a/src/scala/microsoft-spark-3.0.x/src/main/scala/org/apache/spark/deploy/dotnet/DotnetRunner.scala b/src/scala/microsoft-spark-3.0.x/src/main/scala/org/apache/spark/deploy/dotnet/DotnetRunner.scala index 13f541bea..0ceb1a82c 100644 --- a/src/scala/microsoft-spark-3.0.x/src/main/scala/org/apache/spark/deploy/dotnet/DotnetRunner.scala +++ b/src/scala/microsoft-spark-3.0.x/src/main/scala/org/apache/spark/deploy/dotnet/DotnetRunner.scala @@ -97,7 +97,7 @@ object DotnetRunner extends Logging { val dotnetBackendThread = new Thread("DotnetBackend") { override def run() { // need to get back dotnetBackendPortNumber because if the value passed to init is 0 - // the port number is dynamically assigned in the backend + // the port number is dynamically assigned in the back end dotnetBackendPortNumber = dotnetBackend.init(dotnetBackendPortNumber) logInfo(s"Port number used by DotnetBackend is $dotnetBackendPortNumber") initialized.release() @@ -147,7 +147,7 @@ object DotnetRunner extends Logging { } else { // scalastyle:off println println("***********************************************************************") - println("* .NET Backend running debug mode. Press enter to exit *") + println("* .NET back end running debug mode. 
Press enter to exit *") println("***********************************************************************") // scalastyle:on println From 3fd2c966bf73af7ce9d70c624fd89e614c77fc85 Mon Sep 17 00:00:00 2001 From: Maira Wenzel Date: Thu, 31 Mar 2022 08:58:05 -0700 Subject: [PATCH 2/2] undo some changes --- docs/developer-guide.md | 4 ++-- src/csharp/Microsoft.Spark.E2ETest/SparkFixture.cs | 6 +++--- .../scala/org/apache/spark/deploy/dotnet/DotnetRunner.scala | 2 +- .../scala/org/apache/spark/deploy/dotnet/DotnetRunner.scala | 2 +- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/docs/developer-guide.md b/docs/developer-guide.md index bb5b19568..6d7115079 100644 --- a/docs/developer-guide.md +++ b/docs/developer-guide.md @@ -16,10 +16,10 @@ spark-submit \ \ debug ``` -and you will see the followng output: +and you will see the following output: ``` *********************************************************************** -* .NET back end running debug mode. Press enter to exit * +* .NET Backend running debug mode. Press enter to exit * *********************************************************************** ``` In this debug mode, `DotnetRunner` does not launch the .NET application, but waits for it to connect. Leave this command prompt window open. diff --git a/src/csharp/Microsoft.Spark.E2ETest/SparkFixture.cs b/src/csharp/Microsoft.Spark.E2ETest/SparkFixture.cs index 4202755eb..965520569 100644 --- a/src/csharp/Microsoft.Spark.E2ETest/SparkFixture.cs +++ b/src/csharp/Microsoft.Spark.E2ETest/SparkFixture.cs @@ -41,7 +41,7 @@ public class EnvironmentVariableNames private readonly Process _process = new Process(); private readonly TemporaryDirectory _tempDirectory = new TemporaryDirectory(); - + public const string DefaultLogLevel = "ERROR"; internal SparkSession Spark { get; } @@ -77,7 +77,7 @@ public SparkFixture() // Scala-side driver for .NET emits the following message after it is // launched and ready to accept connections. 
if (!isSparkReady && - arguments.Data.Contains("back end running debug mode")) + arguments.Data.Contains("Backend running debug mode")) { isSparkReady = true; } @@ -110,7 +110,7 @@ public SparkFixture() .Config("spark.ui.showConsoleProgress", false) .AppName("Microsoft.Spark.E2ETest") .GetOrCreate(); - + Spark.SparkContext.SetLogLevel(DefaultLogLevel); Jvm = Spark.Reference.Jvm; diff --git a/src/scala/microsoft-spark-3-0/src/main/scala/org/apache/spark/deploy/dotnet/DotnetRunner.scala b/src/scala/microsoft-spark-3-0/src/main/scala/org/apache/spark/deploy/dotnet/DotnetRunner.scala index 340a83825..33f0822ee 100644 --- a/src/scala/microsoft-spark-3-0/src/main/scala/org/apache/spark/deploy/dotnet/DotnetRunner.scala +++ b/src/scala/microsoft-spark-3-0/src/main/scala/org/apache/spark/deploy/dotnet/DotnetRunner.scala @@ -161,7 +161,7 @@ object DotnetRunner extends Logging { } else { // scalastyle:off println println("***********************************************************************") - println("* .NET back end running debug mode. Press enter to exit *") + println("* .NET Backend running debug mode. Press enter to exit *") println("***********************************************************************") // scalastyle:on println diff --git a/src/scala/microsoft-spark-3-1/src/main/scala/org/apache/spark/deploy/dotnet/DotnetRunner.scala b/src/scala/microsoft-spark-3-1/src/main/scala/org/apache/spark/deploy/dotnet/DotnetRunner.scala index ee210533f..4e78c5bc2 100644 --- a/src/scala/microsoft-spark-3-1/src/main/scala/org/apache/spark/deploy/dotnet/DotnetRunner.scala +++ b/src/scala/microsoft-spark-3-1/src/main/scala/org/apache/spark/deploy/dotnet/DotnetRunner.scala @@ -161,7 +161,7 @@ object DotnetRunner extends Logging { } else { // scalastyle:off println println("***********************************************************************") - println("* .NET back end running debug mode. Press enter to exit *") + println("* .NET Backend running debug mode. 
Press enter to exit                              *") println("***********************************************************************") // scalastyle:on println