This article compiles typical usage examples of the C# class Microsoft.Spark.CSharp.Interop.Ipc.JvmObjectReference. If you have been wondering what JvmObjectReference is for and how to use it in practice, the selected class-level code examples below may help.
The JvmObjectReference class belongs to the Microsoft.Spark.CSharp.Interop.Ipc namespace. Twenty code examples of the class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the site recommend better C# code samples.
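Across the examples that follow, the same pattern recurs: a call through SparkCLRIpcProxy.JvmBridge returns the identifier of an object living in the JVM as a string, that identifier is wrapped in a JvmObjectReference, and the reference is then passed back into further bridge calls. The minimal sketch below illustrates that pattern; existingReference, methodOnJvmObject, and anotherMethod are hypothetical names used only for illustration, and a running SparkCLR/Mobius JVM bridge is assumed.

// Hedged sketch of the recurring pattern (hypothetical method names; a connected JVM bridge is assumed).
// 1. A bridge call returns the JVM-side object ID as a string.
var objectId = (string)SparkCLRIpcProxy.JvmBridge.CallNonStaticJavaMethod(existingReference, "methodOnJvmObject");
// 2. Wrap the ID so the JVM object can be addressed from C#.
var reference = new JvmObjectReference(objectId);
// 3. Use the wrapper as the target (or an argument) of later bridge calls.
var result = SparkCLRIpcProxy.JvmBridge.CallNonStaticJavaMethod(reference, "anotherMethod");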
Example 1: CreateCSharpDStream
public IDStreamProxy CreateCSharpDStream(IDStreamProxy jdstream, byte[] func, string deserializer)
{
    // Wrap the existing JVM DStream in a CSharpDStream, then expose it as a JavaDStream.
    var jvmDStreamReference = SparkCLRIpcProxy.JvmBridge.CallConstructor("org.apache.spark.streaming.api.csharp.CSharpDStream",
        new object[] { (jdstream as DStreamIpcProxy).jvmDStreamReference, func, deserializer });
    var javaDStreamReference = new JvmObjectReference((string)SparkCLRIpcProxy.JvmBridge.CallNonStaticJavaMethod(jvmDStreamReference, "asJavaDStream"));
    return new DStreamIpcProxy(javaDStreamReference, jvmDStreamReference);
}
Author: hhland, Project: SparkCLR, Lines of code: 8, Source file: SparkCLRIpcProxy.cs
Example 2: StreamingContextIpcProxy
public StreamingContextIpcProxy(SparkContext sparkContext, long durationMs)
{
    this.sparkContext = sparkContext;
    sparkContextProxy = sparkContext.SparkContextProxy;
    var jduration = SparkCLRIpcProxy.JvmBridge.CallConstructor("org.apache.spark.streaming.Duration", new object[] { durationMs });
    JvmObjectReference jvmSparkContextReference = (sparkContextProxy as SparkContextIpcProxy).JvmSparkContextReference;
    jvmStreamingContextReference = SparkCLRIpcProxy.JvmBridge.CallConstructor("org.apache.spark.streaming.StreamingContext", new object[] { jvmSparkContextReference, jduration });
    jvmJavaStreamingReference = SparkCLRIpcProxy.JvmBridge.CallConstructor("org.apache.spark.streaming.api.java.JavaStreamingContext", new object[] { jvmStreamingContextReference });
}
Author: edepaula, Project: SparkCLR, Lines of code: 10, Source file: StreamingContextIpcProxy.cs
Example 3: CreateDataFrame
public IDataFrameProxy CreateDataFrame(IRDDProxy rddProxy, IStructTypeProxy structTypeProxy)
{
    var rdd = new JvmObjectReference(SparkCLRIpcProxy.JvmBridge.CallStaticJavaMethod("org.apache.spark.sql.api.csharp.SQLUtils", "byteArrayRDDToAnyArrayRDD",
        new object[] { (rddProxy as RDDIpcProxy).JvmRddReference }).ToString());
    return new DataFrameIpcProxy(
        new JvmObjectReference(
            SparkCLRIpcProxy.JvmBridge.CallNonStaticJavaMethod(jvmSqlContextReference, "applySchemaToPythonRDD",
                new object[] { rdd, (structTypeProxy as StructTypeIpcProxy).JvmStructTypeReference }).ToString()), this);
}
Author: xsidurd, Project: SparkCLR, Lines of code: 10, Source file: SqlContextIpcProxy.cs
Example 4: StreamingContextIpcProxy
public StreamingContextIpcProxy(string checkpointPath)
{
    // Restore a JavaStreamingContext from a checkpoint and rebuild the C# proxies
    // for the underlying StreamingContext, SparkContext, and SparkConf.
    jvmJavaStreamingReference = SparkCLRIpcProxy.JvmBridge.CallConstructor("org.apache.spark.streaming.api.java.JavaStreamingContext", new object[] { checkpointPath });
    jvmStreamingContextReference = new JvmObjectReference((string)SparkCLRIpcProxy.JvmBridge.CallNonStaticJavaMethod(jvmJavaStreamingReference, "ssc"));
    JvmObjectReference jvmSparkContextReference = new JvmObjectReference((string)SparkCLRIpcProxy.JvmBridge.CallNonStaticJavaMethod(jvmStreamingContextReference, "sc"));
    JvmObjectReference jvmSparkConfReference = new JvmObjectReference((string)SparkCLRIpcProxy.JvmBridge.CallNonStaticJavaMethod(jvmStreamingContextReference, "conf"));
    JvmObjectReference jvmJavaContextReference = new JvmObjectReference((string)SparkCLRIpcProxy.JvmBridge.CallNonStaticJavaMethod(jvmJavaStreamingReference, "sparkContext"));
    sparkContextProxy = new SparkContextIpcProxy(jvmSparkContextReference, jvmJavaContextReference);
    var sparkConfProxy = new SparkConfIpcProxy(jvmSparkConfReference);
    sparkContext = new SparkContext(sparkContextProxy, new SparkConf(sparkConfProxy));
}
Author: nkiranrao, Project: SparkCLR, Lines of code: 11, Source file: StreamingContextIpcProxy.cs
Example 5: GetJobInfo
public SparkJobInfo GetJobInfo(int jobId)
{
    var jobInfoId = SparkCLRIpcProxy.JvmBridge.CallNonStaticJavaMethod(jvmStatusTrackerReference, "getJobInfo", new object[] { jobId });
    if (jobInfoId == null)
        return null;
    JvmObjectReference jJobInfo = new JvmObjectReference((string)jobInfoId);
    int[] stageIds = (int[])SparkCLRIpcProxy.JvmBridge.CallNonStaticJavaMethod(jJobInfo, "stageIds");
    string status = SparkCLRIpcProxy.JvmBridge.CallNonStaticJavaMethod(jJobInfo, "status").ToString();
    return new SparkJobInfo(jobId, stageIds, status);
}
Author: resnick1223, Project: SparkCLR, Lines of code: 12, Source file: StatusTrackerIpcProxy.cs
Example 6: CheckpointExists
public bool CheckpointExists(string checkpointPath)
{
    if (checkpointPath == null)
        return false;
    // Verify via the Hadoop FileSystem API that the checkpoint path exists and can be listed.
    var path = SparkCLRIpcProxy.JvmBridge.CallConstructor("org.apache.hadoop.fs.Path", checkpointPath);
    var conf = SparkCLRIpcProxy.JvmBridge.CallConstructor("org.apache.hadoop.conf.Configuration");
    var fs = new JvmObjectReference((string)SparkCLRIpcProxy.JvmBridge.CallNonStaticJavaMethod(path, "getFileSystem", conf));
    return (bool)SparkCLRIpcProxy.JvmBridge.CallNonStaticJavaMethod(fs, "exists", path) &&
        SparkCLRIpcProxy.JvmBridge.CallNonStaticJavaMethod(fs, "listStatus", path) != null;
}
Author: corba777, Project: Mobius, Lines of code: 12, Source file: SparkCLRIpcProxy.cs
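A minimal usage sketch for this method follows; sparkCLRProxy is a hypothetical instance of the proxy class that defines CheckpointExists, and the checkpoint location is illustrative.

// Hedged sketch: sparkCLRProxy is a hypothetical proxy instance exposing CheckpointExists.
var checkpointDir = "hdfs:///user/spark/streaming/checkpoint";
if (sparkCLRProxy.CheckpointExists(checkpointDir))
{
    Console.WriteLine("Existing checkpoint found; the streaming context could be restored from it.");
}
else
{
    Console.WriteLine("No checkpoint found; a new streaming context would be created.");
}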
Example 7: StreamingContextIpcProxy
public StreamingContextIpcProxy(SparkContext sparkContext, int durationSeconds)
{
    this.sparkContext = sparkContext;
    sparkContextProxy = sparkContext.SparkContextProxy;
    var jduration = JvmBridgeUtils.GetJavaDuration(durationSeconds);
    JvmObjectReference jvmSparkContextReference = (sparkContextProxy as SparkContextIpcProxy).JvmSparkContextReference;
    jvmStreamingContextReference = SparkCLRIpcProxy.JvmBridge.CallConstructor("org.apache.spark.streaming.StreamingContext", new object[] { jvmSparkContextReference, jduration });
    jvmJavaStreamingReference = SparkCLRIpcProxy.JvmBridge.CallConstructor("org.apache.spark.streaming.api.java.JavaStreamingContext", new object[] { jvmStreamingContextReference });
    StartAccumulatorServer(sparkContext);
    StartCallbackServer();
}
Author: cyruszhang, Project: Mobius, Lines of code: 13, Source file: StreamingContextIpcProxy.cs
Example 8: StreamingContextIpcProxy
public StreamingContextIpcProxy(SparkContext sparkContext, long durationMs)
{
    this.sparkContext = sparkContext;
    sparkContextProxy = sparkContext.SparkContextProxy;
    var jduration = SparkCLRIpcProxy.JvmBridge.CallConstructor("org.apache.spark.streaming.Duration", new object[] { durationMs });
    JvmObjectReference jvmSparkContextReference = (sparkContextProxy as SparkContextIpcProxy).JvmSparkContextReference;
    jvmStreamingContextReference = SparkCLRIpcProxy.JvmBridge.CallConstructor("org.apache.spark.streaming.StreamingContext", new object[] { jvmSparkContextReference, jduration });
    jvmJavaStreamingReference = SparkCLRIpcProxy.JvmBridge.CallConstructor("org.apache.spark.streaming.api.java.JavaStreamingContext", new object[] { jvmStreamingContextReference });
    int port = StartCallback();
    SparkCLRIpcProxy.JvmBridge.CallStaticJavaMethod("SparkCLRHandler", "connectCallback", port); // className and methodName hardcoded in CSharpBackendHandler
}
Author: CapeTownCoders, Project: SparkCLR, Lines of code: 13, Source file: StreamingContextIpcProxy.cs
Example 9: RegisterFunction
public void RegisterFunction(string name, byte[] command, string returnType)
{
    var judf = new JvmObjectReference((string)SparkCLRIpcProxy.JvmBridge.CallNonStaticJavaMethod(jvmSqlContextReference, "udf"));
    var hashTableReference = SparkCLRIpcProxy.JvmBridge.CallConstructor("java.util.Hashtable", new object[] { });
    var arrayListReference = SparkCLRIpcProxy.JvmBridge.CallConstructor("java.util.ArrayList", new object[] { });
    SparkCLRIpcProxy.JvmBridge.CallNonStaticJavaMethod(judf, "registerPython",
        new object[]
        {
            name, command, hashTableReference, arrayListReference,
            SparkCLREnvironment.ConfigurationService.GetCSharpWorkerExePath(),
            "1.0",
            arrayListReference, null, "\"" + returnType + "\""
        });
}
Author: resnick1223, Project: SparkCLR, Lines of code: 16, Source file: SqlContextIpcProxy.cs
Example 10: GetStageInfo
public SparkStageInfo GetStageInfo(int stageId)
{
    var stageInfoId = SparkCLRIpcProxy.JvmBridge.CallNonStaticJavaMethod(jvmStatusTrackerReference, "getStageInfo", new object[] { stageId });
    if (stageInfoId == null)
        return null;
    JvmObjectReference jStageInfo = new JvmObjectReference((string)stageInfoId);
    int currentAttemptId = (int)SparkCLRIpcProxy.JvmBridge.CallNonStaticJavaMethod(jStageInfo, "currentAttemptId");
    int submissionTime = (int)SparkCLRIpcProxy.JvmBridge.CallNonStaticJavaMethod(jStageInfo, "submissionTime");
    string name = (string)SparkCLRIpcProxy.JvmBridge.CallNonStaticJavaMethod(jStageInfo, "name");
    int numTasks = (int)SparkCLRIpcProxy.JvmBridge.CallNonStaticJavaMethod(jStageInfo, "numTasks");
    int numActiveTasks = (int)SparkCLRIpcProxy.JvmBridge.CallNonStaticJavaMethod(jStageInfo, "numActiveTasks");
    int numCompletedTasks = (int)SparkCLRIpcProxy.JvmBridge.CallNonStaticJavaMethod(jStageInfo, "numCompletedTasks");
    int numFailedTasks = (int)SparkCLRIpcProxy.JvmBridge.CallNonStaticJavaMethod(jStageInfo, "numFailedTasks");
    return new SparkStageInfo(stageId, currentAttemptId, (long)submissionTime, name, numTasks, numActiveTasks, numCompletedTasks, numFailedTasks);
}
Author: resnick1223, Project: SparkCLR, Lines of code: 17, Source file: StatusTrackerIpcProxy.cs
Example 11: TestJVMObjectRelease
public void TestJVMObjectRelease()
{
    // Make the weak object manager wait for 2 seconds for initial validation before it starts releasing objects.
    var weakObjectManager = new WeakObjectManagerImpl(TimeSpan.FromSeconds(2)) { ObjectReleaser = new MockObjectReleaser() };
    // Reset WeakObjectManager for validation - this avoids side effects from other tests.
    SparkCLREnvironment.WeakObjectManager = weakObjectManager;
    var waitEndTime = DateTime.Now + TimeSpan.FromSeconds(4);

    // No items added yet.
    Assert.AreEqual(0, weakObjectManager.GetReferencesCount());

    var totalItemCount = 10;
    for (var k = 1; k <= totalItemCount; k++)
    {
        // Each object adds itself to the WeakObjectManager.
        var obj = new JvmObjectReference(k.ToString());
    }

    // All items that were added should be available.
    Assert.AreEqual(totalItemCount, weakObjectManager.GetReferencesCount());

    // Shorten the check interval to start releasing objects.
    weakObjectManager.CheckInterval = TimeSpan.FromMilliseconds(200);
    GC.Collect();
    GC.WaitForPendingFinalizers();

    // Reset the check interval to its default.
    weakObjectManager.CheckInterval = WeakObjectManagerImpl.DefaultCheckInterval;
    var remainingTimeToWait = waitEndTime - DateTime.Now;
    if (remainingTimeToWait.TotalMilliseconds > 0)
    {
        Thread.Sleep(remainingTimeToWait);
    }

    var countAfterReleasingObjects = weakObjectManager.GetReferencesCount();
    var aliveCount = weakObjectManager.GetAliveCount();

    // Validate that some items were released.
    Assert.AreEqual(1, countAfterReleasingObjects);
    Assert.IsTrue(countAfterReleasingObjects < totalItemCount);
    // Validate that the unreleased items are exactly the alive items.
    Assert.AreEqual(0, countAfterReleasingObjects - aliveCount);
}
Author: yxbdali, Project: Mobius, Lines of code: 45, Source file: WeakObjectManagerTest.cs
Example 12: EnumerateFiles
/// <summary>
/// List the names of all the files under the given path.
/// </summary>
public IEnumerable<string> EnumerateFiles(string path)
{
    var pathJvmReference = SparkCLRIpcProxy.JvmBridge.CallConstructor("org.apache.hadoop.fs.Path", path);
    var statusList = (List<JvmObjectReference>)SparkCLRIpcProxy.JvmBridge.CallNonStaticJavaMethod(jvmHdfsReference, "listStatus", pathJvmReference);
    if (statusList == null || statusList.Count == 0)
    {
        return new string[0];
    }
    var files = new string[statusList.Count];
    for (var i = 0; i < statusList.Count; i++)
    {
        var subPathJvmReference = new JvmObjectReference((string)SparkCLRIpcProxy.JvmBridge.CallNonStaticJavaMethod(statusList[i], "getPath"));
        files[i] = (string)SparkCLRIpcProxy.JvmBridge.CallNonStaticJavaMethod(subPathJvmReference, "getName");
    }
    return files;
}
Author: xsidurd, Project: SparkCLR, Lines of code: 22, Source file: HdfsFileSystemHelper.cs
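A short usage sketch for this helper is shown below; it assumes HdfsFileSystemHelper (the class this method belongs to, judging by the source file name) can be constructed with a parameterless constructor and that a SparkCLR JVM bridge is available. The path is illustrative.

// Hedged sketch: assumes a parameterless HdfsFileSystemHelper constructor and a connected JVM bridge.
var hdfs = new HdfsFileSystemHelper();
foreach (var fileName in hdfs.EnumerateFiles("/user/spark/input"))
{
    Console.WriteLine(fileName);
}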
Example 13: Union
public IRDDProxy Union(IEnumerable<IRDDProxy> rdds)
{
    var jfirst = (rdds.First() as RDDIpcProxy).JvmRddReference;
    var jrest = JvmBridgeUtils.GetJavaList<JvmObjectReference>(rdds.Skip(1).Select(r => (r as RDDIpcProxy).JvmRddReference));
    var jvmRddReference = new JvmObjectReference((string)SparkCLRIpcProxy.JvmBridge.CallNonStaticJavaMethod(jvmJavaContextReference, "union", new object[] { jfirst, jrest }));
    return new RDDIpcProxy(jvmRddReference);
}
Author: guiling, Project: SparkCLR, Lines of code: 7, Source file: SparkContextIpcProxy.cs
Example 14: SequenceFile
public IRDDProxy SequenceFile(string filePath, string keyClass, string valueClass, string keyConverterClass, string valueConverterClass, int minSplits, int batchSize)
{
    var jvmRddReference = new JvmObjectReference((string)SparkCLRIpcProxy.JvmBridge.CallStaticJavaMethod("org.apache.spark.api.python.PythonRDD", "sequenceFile",
        new object[] { jvmJavaContextReference, filePath, keyClass, valueClass, keyConverterClass, valueConverterClass, minSplits, batchSize }));
    return new RDDIpcProxy(jvmRddReference);
}
Author: guiling, Project: SparkCLR, Lines of code: 6, Source file: SparkContextIpcProxy.cs
Example 15: ReadBroadcastFromFile
public IBroadcastProxy ReadBroadcastFromFile(string path, out long broadcastId)
{
    JvmObjectReference jbroadcast = new JvmObjectReference((string)SparkCLRIpcProxy.JvmBridge.CallStaticJavaMethod("org.apache.spark.api.python.PythonRDD", "readBroadcastFromFile", new object[] { jvmJavaContextReference, path }));
    broadcastId = (long)(double)SparkCLRIpcProxy.JvmBridge.CallNonStaticJavaMethod(jbroadcast, "id");
    jvmBroadcastReferences.Add(jbroadcast);
    return new BroadcastIpcProxy(jbroadcast, this);
}
Author: guiling, Project: SparkCLR, Lines of code: 7, Source file: SparkContextIpcProxy.cs
Example 16: Parallelize
public IRDDProxy Parallelize(IEnumerable<byte[]> values, int numSlices)
{
    var jvmRddReference = new JvmObjectReference((string)SparkCLRIpcProxy.JvmBridge.CallStaticJavaMethod("org.apache.spark.api.csharp.CSharpRDD", "createRDDFromArray", new object[] { jvmSparkContextReference, values, numSlices }));
    return new RDDIpcProxy(jvmRddReference);
}
Author: guiling, Project: SparkCLR, Lines of code: 5, Source file: SparkContextIpcProxy.cs
Example 17: NewAPIHadoopRDD
public IRDDProxy NewAPIHadoopRDD(string inputFormatClass, string keyClass, string valueClass, string keyConverterClass, string valueConverterClass, IEnumerable<KeyValuePair<string, string>> conf, int batchSize)
{
    var jconf = JvmBridgeUtils.GetJavaHashMap<string, string>(conf);
    var jvmRddReference = new JvmObjectReference((string)SparkCLRIpcProxy.JvmBridge.CallStaticJavaMethod("org.apache.spark.api.python.PythonRDD", "newAPIHadoopRDD",
        new object[] { jvmJavaContextReference, inputFormatClass, keyClass, valueClass, keyConverterClass, valueConverterClass, jconf, batchSize }));
    return new RDDIpcProxy(jvmRddReference);
}
Author: guiling, Project: SparkCLR, Lines of code: 7, Source file: SparkContextIpcProxy.cs
Example 18: BroadcastIpcProxy
public BroadcastIpcProxy(JvmObjectReference jvmBroadcastReference, SparkContextIpcProxy sparkContextIpcProxy)
{
    this.jvmBroadcastReference = jvmBroadcastReference;
    this.sparkContextIpcProxy = sparkContextIpcProxy;
}
Author: guiling, Project: SparkCLR, Lines of code: 5, Source file: SparkContextIpcProxy.cs
Example 19: EmptyRDD
public IRDDProxy EmptyRDD()
{
    var jvmRddReference = new JvmObjectReference((string)SparkCLRIpcProxy.JvmBridge.CallNonStaticJavaMethod(jvmJavaContextReference, "emptyRDD"));
    return new RDDIpcProxy(jvmRddReference);
}
Author: guiling, Project: SparkCLR, Lines of code: 5, Source file: SparkContextIpcProxy.cs
Example 20: CreateUserDefinedCSharpFunction
public IUDFProxy CreateUserDefinedCSharpFunction(string name, byte[] command, string returnType = "string")
{
    var jSqlContext = SparkCLRIpcProxy.JvmBridge.CallConstructor("org.apache.spark.sql.SQLContext", new object[] { jvmSparkContextReference });
    var jDataType = new JvmObjectReference((string)SparkCLRIpcProxy.JvmBridge.CallNonStaticJavaMethod(jSqlContext, "parseDataType", new object[] { "\"" + returnType + "\"" }));
    var jbroadcastVariables = JvmBridgeUtils.GetJavaList<JvmObjectReference>(jvmBroadcastReferences);
    var hashTableReference = SparkCLRIpcProxy.JvmBridge.CallConstructor("java.util.Hashtable", new object[] { });
    var arrayListReference = SparkCLRIpcProxy.JvmBridge.CallConstructor("java.util.ArrayList", new object[] { });
    return new UDFIpcProxy(SparkCLRIpcProxy.JvmBridge.CallConstructor("org.apache.spark.sql.UserDefinedPythonFunction",
        new object[]
        {
            name, command, hashTableReference, arrayListReference,
            SparkCLREnvironment.ConfigurationService.GetCSharpWorkerExePath(),
            "1.0",
            jbroadcastVariables, jvmAccumulatorReference, jDataType
        }));
}
Author: guiling, Project: SparkCLR, Lines of code: 18, Source file: SparkContextIpcProxy.cs
Note: The Microsoft.Spark.CSharp.Interop.Ipc.JvmObjectReference class examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other source-code and documentation platforms. The code snippets were selected from open-source projects contributed by their respective authors; copyright in the source code remains with the original authors, and distribution and use should follow each project's license. Do not republish without permission.