This article collects typical usage examples of the Java class org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.security.LocalizerTokenIdentifier. If you are wondering what LocalizerTokenIdentifier is for or how to use it, the curated class code examples below should help.
The LocalizerTokenIdentifier class belongs to the package org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.security. Seven code examples of the class are shown below, ordered by popularity by default.
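Before the examples, here is a minimal writer-side sketch of the pattern they all share: a secret manager mints a LocalizerTokenIdentifier, wraps it in a Token, registers it in a Credentials object under its kind, and serializes the credentials for the container localizer to read back later. The LocalizerTokenSecretManager type and the helper method name are assumptions for illustration (the examples only show a secretManager field); setting up the secret manager is NodeManager-specific and not shown here.

import java.io.IOException;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.security.LocalizerTokenIdentifier;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.security.LocalizerTokenSecretManager;

public class LocalizerTokenWriteSketch {
  // Hypothetical helper: mints a localizer token, adds it to the credentials,
  // and returns the serialized credentials bytes.
  static byte[] serializeWithLocalizerToken(Credentials credentials,
      LocalizerTokenSecretManager secretManager) throws IOException {
    // Mint a localizer token and register it under its kind
    LocalizerTokenIdentifier id = secretManager.createIdentifier();
    Token<LocalizerTokenIdentifier> localizerToken =
        new Token<LocalizerTokenIdentifier>(id, secretManager);
    credentials.addToken(id.getKind(), localizerToken);

    // Serialize the credentials the same way the examples write the nmPrivate file
    DataOutputBuffer out = new DataOutputBuffer();
    credentials.writeTokenStorageToStream(out);
    return out.getData();
  }
}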
Example 1: writeCredentials
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.security.LocalizerTokenIdentifier; // import the required package/class
private void writeCredentials(Path nmPrivateCTokensPath)
    throws IOException {
  DataOutputStream tokenOut = null;
  try {
    Credentials credentials = context.getCredentials();
    if (UserGroupInformation.isSecurityEnabled()) {
      // Prefer the system credentials pushed from the RM when security is enabled
      Credentials systemCredentials =
          getSystemCredentialsSentFromRM(context);
      if (systemCredentials != null) {
        credentials = systemCredentials;
      }
    }
    FileContext lfs = getLocalFileContext(getConfig());
    tokenOut =
        lfs.create(nmPrivateCTokensPath, EnumSet.of(CREATE, OVERWRITE));
    LOG.info("Writing credentials to the nmPrivate file "
        + nmPrivateCTokensPath.toString() + ". Credentials list: ");
    if (LOG.isDebugEnabled()) {
      for (Token<? extends TokenIdentifier> tk : credentials
          .getAllTokens()) {
        LOG.debug(tk + " : " + buildTokenFingerprint(tk));
      }
    }
    if (UserGroupInformation.isSecurityEnabled()) {
      // Copy the credentials and add a localizer token so the ContainerLocalizer
      // can authenticate to the NodeManager's localization service
      credentials = new Credentials(credentials);
      LocalizerTokenIdentifier id = secretManager.createIdentifier();
      Token<LocalizerTokenIdentifier> localizerToken =
          new Token<LocalizerTokenIdentifier>(id, secretManager);
      credentials.addToken(id.getKind(), localizerToken);
    }
    credentials.writeTokenStorageToStream(tokenOut);
  } finally {
    if (tokenOut != null) {
      tokenOut.close();
    }
  }
}
Developer: aliyun-beta | Project: aliyun-oss-hadoop-fs | Lines: 39 | Source: ResourceLocalizationService.java
Example 2: writeCredentials
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.security.LocalizerTokenIdentifier; // import the required package/class
private void writeCredentials(Path nmPrivateCTokensPath)
    throws IOException {
  DataOutputStream tokenOut = null;
  try {
    Credentials credentials = context.getCredentials();
    if (UserGroupInformation.isSecurityEnabled()) {
      Credentials systemCredentials =
          getSystemCredentialsSentFromRM(context);
      if (systemCredentials != null) {
        credentials = systemCredentials;
      }
    }
    FileContext lfs = getLocalFileContext(getConfig());
    tokenOut =
        lfs.create(nmPrivateCTokensPath, EnumSet.of(CREATE, OVERWRITE));
    LOG.info("Writing credentials to the nmPrivate file "
        + nmPrivateCTokensPath.toString() + ". Credentials list: ");
    if (LOG.isDebugEnabled()) {
      for (Token<? extends TokenIdentifier> tk : credentials
          .getAllTokens()) {
        LOG.debug(tk.getService() + " : " + tk.encodeToUrlString());
      }
    }
    if (UserGroupInformation.isSecurityEnabled()) {
      credentials = new Credentials(credentials);
      LocalizerTokenIdentifier id = secretManager.createIdentifier();
      Token<LocalizerTokenIdentifier> localizerToken =
          new Token<LocalizerTokenIdentifier>(id, secretManager);
      credentials.addToken(id.getKind(), localizerToken);
    }
    credentials.writeTokenStorageToStream(tokenOut);
  } finally {
    if (tokenOut != null) {
      tokenOut.close();
    }
  }
}
Developer: yncxcw | Project: big-c | Lines: 39 | Source: ResourceLocalizationService.java
Example 3: writeCredentials
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.security.LocalizerTokenIdentifier; // import the required package/class
private void writeCredentials(Path nmPrivateCTokensPath)
    throws IOException {
  DataOutputStream tokenOut = null;
  try {
    Credentials credentials = context.getCredentials();
    FileContext lfs = getLocalFileContext(getConfig());
    tokenOut =
        lfs.create(nmPrivateCTokensPath, EnumSet.of(CREATE, OVERWRITE));
    LOG.info("Writing credentials to the nmPrivate file "
        + nmPrivateCTokensPath.toString() + ". Credentials list: ");
    if (LOG.isDebugEnabled()) {
      for (Token<? extends TokenIdentifier> tk : credentials
          .getAllTokens()) {
        LOG.debug(tk.getService() + " : " + tk.encodeToUrlString());
      }
    }
    if (UserGroupInformation.isSecurityEnabled()) {
      LocalizerTokenIdentifier id = secretManager.createIdentifier();
      Token<LocalizerTokenIdentifier> localizerToken =
          new Token<LocalizerTokenIdentifier>(id, secretManager);
      credentials.addToken(id.getKind(), localizerToken);
    }
    credentials.writeTokenStorageToStream(tokenOut);
  } finally {
    if (tokenOut != null) {
      tokenOut.close();
    }
  }
}
Developer: ict-carch | Project: hadoop-plus | Lines: 30 | Source: ResourceLocalizationService.java
Example 4: writeCredentials
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.security.LocalizerTokenIdentifier; // import the required package/class
private void writeCredentials(Path nmPrivateCTokensPath)
    throws IOException {
  DataOutputStream tokenOut = null;
  try {
    Credentials credentials = context.getCredentials();
    FileContext lfs = getLocalFileContext(getConfig());
    tokenOut =
        lfs.create(nmPrivateCTokensPath, EnumSet.of(CREATE, OVERWRITE));
    LOG.info("Writing credentials to the nmPrivate file "
        + nmPrivateCTokensPath.toString() + ". Credentials list: ");
    if (LOG.isDebugEnabled()) {
      for (Token<? extends TokenIdentifier> tk : credentials
          .getAllTokens()) {
        LOG.debug(tk.getService() + " : " + tk.encodeToUrlString());
      }
    }
    if (UserGroupInformation.isSecurityEnabled()) {
      credentials = new Credentials(credentials);
      LocalizerTokenIdentifier id = secretManager.createIdentifier();
      Token<LocalizerTokenIdentifier> localizerToken =
          new Token<LocalizerTokenIdentifier>(id, secretManager);
      credentials.addToken(id.getKind(), localizerToken);
    }
    credentials.writeTokenStorageToStream(tokenOut);
  } finally {
    if (tokenOut != null) {
      tokenOut.close();
    }
  }
}
Developer: chendave | Project: hadoop-TCP | Lines: 31 | Source: ResourceLocalizationService.java
Example 5: runLocalization
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.security.LocalizerTokenIdentifier; // import the required package/class
@SuppressWarnings("deprecation")
public int runLocalization(final InetSocketAddress nmAddr)
throws IOException, InterruptedException {
// load credentials
initDirs(conf, user, appId, lfs, localDirs);
final Credentials creds = new Credentials();
DataInputStream credFile = null;
try {
// assume credentials in cwd
// TODO: Fix
Path tokenPath =
new Path(String.format(TOKEN_FILE_NAME_FMT, localizerId));
credFile = lfs.open(tokenPath);
creds.readTokenStorageStream(credFile);
// Explicitly deleting token file.
lfs.delete(tokenPath, false);
} finally {
if (credFile != null) {
credFile.close();
}
}
// create localizer context
UserGroupInformation remoteUser =
UserGroupInformation.createRemoteUser(user);
remoteUser.addToken(creds.getToken(LocalizerTokenIdentifier.KIND));
final LocalizationProtocol nodeManager =
remoteUser.doAs(new PrivilegedAction<LocalizationProtocol>() {
@Override
public LocalizationProtocol run() {
return getProxy(nmAddr);
}
});
// create user context
UserGroupInformation ugi =
UserGroupInformation.createRemoteUser(user);
for (Token<? extends TokenIdentifier> token : creds.getAllTokens()) {
ugi.addToken(token);
}
ExecutorService exec = null;
try {
exec = createDownloadThreadPool();
CompletionService<Path> ecs = createCompletionService(exec);
localizeFiles(nodeManager, ecs, ugi);
return 0;
} catch (Throwable e) {
// Print traces to stdout so that they can be logged by the NM address
// space.
e.printStackTrace(System.out);
return -1;
} finally {
try {
if (exec != null) {
exec.shutdownNow();
}
LocalDirAllocator.removeContext(appCacheDirContextName);
} finally {
closeFileSystems(ugi);
}
}
}
Developer: naver | Project: hadoop | Lines: 63 | Source: ContainerLocalizer.java
Example 6: runLocalization
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.security.LocalizerTokenIdentifier; // import the required package/class
@SuppressWarnings("deprecation")
public int runLocalization(final InetSocketAddress nmAddr)
throws IOException, InterruptedException {
// load credentials
initDirs(conf, user, appId, lfs, localDirs);
final Credentials creds = new Credentials();
DataInputStream credFile = null;
try {
// assume credentials in cwd
// TODO: Fix
credFile = lfs.open(
new Path(String.format(TOKEN_FILE_NAME_FMT, localizerId)));
creds.readTokenStorageStream(credFile);
} finally {
if (credFile != null) {
credFile.close();
}
}
// create localizer context
UserGroupInformation remoteUser =
UserGroupInformation.createRemoteUser(user);
remoteUser.addToken(creds.getToken(LocalizerTokenIdentifier.KIND));
final LocalizationProtocol nodeManager =
remoteUser.doAs(new PrivilegedAction<LocalizationProtocol>() {
@Override
public LocalizationProtocol run() {
return getProxy(nmAddr);
}
});
// create user context
UserGroupInformation ugi =
UserGroupInformation.createRemoteUser(user);
for (Token<? extends TokenIdentifier> token : creds.getAllTokens()) {
ugi.addToken(token);
}
ExecutorService exec = null;
try {
exec = createDownloadThreadPool();
CompletionService<Path> ecs = createCompletionService(exec);
localizeFiles(nodeManager, ecs, ugi);
return 0;
} catch (Throwable e) {
// Print traces to stdout so that they can be logged by the NM address
// space.
e.printStackTrace(System.out);
return -1;
} finally {
try {
if (exec != null) {
exec.shutdownNow();
}
LocalDirAllocator.removeContext(appCacheDirContextName);
} finally {
closeFileSystems(ugi);
}
}
}
Developer: ict-carch | Project: hadoop-plus | Lines: 60 | Source: ContainerLocalizer.java
Example 7: runLocalization
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.security.LocalizerTokenIdentifier; // import the required package/class
@SuppressWarnings("deprecation")
public void runLocalization(final InetSocketAddress nmAddr)
throws IOException, InterruptedException {
// load credentials
initDirs(conf, user, appId, lfs, localDirs, userFolder);
final Credentials creds = new Credentials();
DataInputStream credFile = null;
try {
// assume credentials in cwd
// TODO: Fix
Path tokenPath =
new Path(String.format(TOKEN_FILE_NAME_FMT, localizerId));
credFile = lfs.open(tokenPath);
creds.readTokenStorageStream(credFile);
// Explicitly deleting token file.
lfs.delete(tokenPath, false);
} finally {
if (credFile != null) {
credFile.close();
}
}
// create localizer context
UserGroupInformation remoteUser =
UserGroupInformation.createRemoteUser(user);
remoteUser.addToken(creds.getToken(LocalizerTokenIdentifier.KIND));
final LocalizationProtocol nodeManager =
remoteUser.doAs(new PrivilegedAction<LocalizationProtocol>() {
@Override
public LocalizationProtocol run() {
return getProxy(nmAddr);
}
});
// create user context
UserGroupInformation ugi =
UserGroupInformation.createRemoteUser(user);
for (Token<? extends TokenIdentifier> token : creds.getAllTokens()) {
ugi.addToken(token);
}
ExecutorService exec = null;
try {
exec = createDownloadThreadPool();
CompletionService<Path> ecs = createCompletionService(exec);
localizeFiles(nodeManager, ecs, ugi);
return;
} catch (Throwable e) {
throw new IOException(e);
} finally {
try {
if (exec != null) {
exec.shutdownNow();
}
LocalDirAllocator.removeContext(appCacheDirContextName);
} finally {
closeFileSystems(ugi);
}
}
}
Developer: hopshadoop | Project: hops | Lines: 60 | Source: ContainerLocalizer.java
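To complement the runLocalization examples, here is a small reader-side sketch. It assumes the credentials were previously serialized with writeTokenStorageToStream, reads them from a token file, picks out the localizer token by its kind (LocalizerTokenIdentifier.KIND), and attaches it to a remote-user UGI. The class, method, and file name here are hypothetical and only illustrate the token-loading step; the real ContainerLocalizer reads the file through FileContext and then uses the UGI for the LocalizationProtocol proxy.

import java.io.DataInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.security.LocalizerTokenIdentifier;

public class LoadLocalizerTokenSketch {
  // Hypothetical helper: load serialized credentials and return a UGI that
  // carries only the localizer token.
  static UserGroupInformation loadLocalizerUser(String user, String tokenFile)
      throws IOException {
    Credentials creds = new Credentials();
    try (DataInputStream in = new DataInputStream(new FileInputStream(tokenFile))) {
      // Same wire format that writeCredentials produced with writeTokenStorageToStream
      creds.readTokenStorageStream(in);
    }
    UserGroupInformation remoteUser = UserGroupInformation.createRemoteUser(user);
    // Attach only the localizer token; it authenticates the localization RPC calls
    remoteUser.addToken(creds.getToken(LocalizerTokenIdentifier.KIND));
    return remoteUser;
  }
}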
Note: The LocalizerTokenIdentifier class examples in this article were compiled from source code and documentation platforms such as GitHub and MSDocs, and the code snippets were selected from open-source projects contributed by their respective developers. Copyright of the source code remains with the original authors; consult each project's License before distributing or reusing the code, and do not repost without permission.