Usage of the org.pentaho.di.job.Job class, with code examples


This article collects Java code examples for org.pentaho.di.job.Job and shows how the class is used in practice. The examples are extracted from selected projects hosted on platforms such as GitHub, Stack Overflow, and Maven, and should serve as a useful reference. Details of the Job class:
Package: org.pentaho.di.job
Class name: Job

About Job

This class executes a job as defined by a JobMeta object.

The definition of a PDI job is represented by a JobMeta object. It is typically loaded from a .kjb file, from a PDI repository, or generated dynamically. The declared parameters of the job definition can then be queried using listParameters() and assigned values using calls to setParameterValue(..).
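
For orientation, here is a minimal sketch of that life cycle: load a job definition from a .kjb file, assign a declared parameter, run the job, and check the result. This is an illustrative sketch, not code from the projects below; the file path and the parameter name INPUT_DIR are hypothetical placeholders.

import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.core.Result;
import org.pentaho.di.job.Job;
import org.pentaho.di.job.JobMeta;

public class RunJobExample {
 public static void main( String[] args ) throws Exception {
  KettleEnvironment.init(); // initialize the PDI engine once per JVM

  // Load the job definition from a .kjb file (no repository).
  JobMeta jobMeta = new JobMeta( "/path/to/example.kjb", null );
  Job job = new Job( null, jobMeta );

  // Query the declared parameters, then assign a value (hypothetical name).
  for ( String param : job.listParameters() ) {
   System.out.println( "Declared parameter: " + param );
  }
  job.setParameterValue( "INPUT_DIR", "/tmp/input" );
  job.activateParameters();

  job.start(); // Job extends Thread, so execution is asynchronous
  job.waitUntilFinished();

  Result result = job.getResult();
  if ( result.getNrErrors() > 0 ) {
   System.err.println( "Job finished with errors" );
  }
 }
}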

Code examples

Code example source: pentaho/pentaho-kettle

@Test
public void testRunWithExceptionOnExecuteSetsResult() throws Exception {
 when( mockJob.isStopped() ).thenReturn( false );
 when( mockJob.getParentJob() ).thenReturn( parentJob );
 when( parentJob.isStopped() ).thenReturn( false );
 doThrow( KettleException.class ).when( mockJob ).execute( anyInt(), any( Result.class ) );
 jobRunner.run();
 verify( mockJob, times( 1 ) ).setResult( Mockito.any( Result.class ) );
}

Code example source: pentaho/pentaho-kettle

/**
 * Gets the job name.
 *
 * @return jobName
 */
public String getObjectName() {
 return getJobname();
}

Code example source: pentaho/pentaho-kettle

CarteObjectEntry entry = CarteResource.getCarteObjectEntry( id );
try {
 if ( job.isInitialized() && !job.isActive() ) {
  // Re-connect to the repository if the connection was lost.
  if ( job.getRep() != null && !job.getRep().isConnected() ) {
   if ( job.getRep().getUserInfo() != null ) {
    job.getRep().connect( job.getRep().getUserInfo().getLogin(), job.getRep().getUserInfo().getPassword() );
   } else {
    job.getRep().connect( null, null );
   }
  }
  // Re-create the job from its metadata so it starts from a clean state.
  servletLoggingObject.setContainerObjectId( carteObjectId );
  Job newJob = new Job( job.getRep(), job.getJobMeta(), servletLoggingObject );
  newJob.setLogLevel( job.getLogLevel() );
  // Discard the log lines of the previous run.
  KettleLogStore.discardLines( job.getLogChannelId(), true );
  // ... replace the old job with newJob in the job map ...
  job = newJob;
 }
 job.start();
} catch ( KettleException e ) {
 e.printStackTrace();
}

Code example source: pentaho/pentaho-kettle

/**
 * Sets the parent job.
 *
 * @param parentJob
 *          the new parent job
 */
public void setParentJob( Job parentJob ) {
 this.parentJob = parentJob;
 this.logLevel = parentJob.getLogLevel();
 this.log = new LogChannel( this, parentJob );
 this.containerObjectId = parentJob.getContainerObjectId();
}

Code example source: pentaho/pentaho-kettle

public String getStatus() {
 String message;
 if ( isActive() ) {
  if ( isStopped() ) {
   message = Trans.STRING_HALTING;
  } else {
   message = Trans.STRING_RUNNING;
  }
 } else if ( isFinished() ) {
  message = Trans.STRING_FINISHED;
  if ( getResult().getNrErrors() > 0 ) {
   message += " (with errors)";
  }
 } else if ( isStopped() ) {
  message = Trans.STRING_STOPPED;
  if ( getResult().getNrErrors() > 0 ) {
   message += " (with errors)";
  }
 } else {
  message = Trans.STRING_WAITING;
 }
 return message;
}
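
As a usage note, this status string can be polled from another thread while the job runs. A minimal sketch, assuming job is a Job that has already been started:

while ( job.isActive() ) {
 System.out.println( "Current status: " + job.getStatus() );
 Thread.sleep( 1000 ); // Thread.sleep throws InterruptedException; declare or catch it
}
System.out.println( "Final status: " + job.getStatus() );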

Code example source: pentaho/pentaho-kettle

public synchronized void registerJob( Job job, JobConfiguration jobConfiguration ) {
 job.setContainerObjectId( UUID.randomUUID().toString() );
 CarteObjectEntry entry = new CarteObjectEntry( job.getJobMeta().getName(), job.getContainerObjectId() );
 jobMap.put( entry, job );
 configurationMap.put( entry, jobConfiguration );
}

Code example source: pentaho/pentaho-kettle

LogLevel jobLogLevel = parentJob.getLogLevel();
// ...
if ( parentJob.getJobMeta() != null ) {
 parentJob.getJobMeta().setInternalKettleVariables();
}
args1 = parentJob.getArguments();
// Copy the parent job's declared parameters down to this job entry.
String[] parentParameters = parentJob.listParameters();
for ( int idx = 0; idx < parentParameters.length; idx++ ) {
 String par = parentParameters[idx];
 String def = parentJob.getParameterDefault( par );
 String val = parentJob.getParameterValue( par );
 String des = parentJob.getParameterDescription( par );
 // ...
}
try {
 ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.SpoonTransBeforeStart.id, new Object[] {
  executionConfiguration, parentJob.getJobMeta(), jobMeta, rep
 } );
 List<Object> items = Arrays.asList( runConfiguration, false );
 ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.RunConfigurationSelection.id, items );
 if ( waitingToFinish && (Boolean) items.get( IS_PENTAHO ) ) {
  String jobName = parentJob.getJobMeta().getName();
  String name = jobMeta.getName();
  logBasic( BaseMessages.getString( PKG, "JobJob.Log.InvalidRunConfigurationCombination", jobName, name ) );
 }
} catch ( Exception e ) {
 // ...
}
remoteSlaveServer = parentJob.getJobMeta().findSlaveServer( realRemoteSlaveServerName );
if ( remoteSlaveServer == null ) {
 // ...
}

Code example source: pentaho/pentaho-kettle

if ( job == null || ( job.isFinished() || job.isStopped() ) && !job.isActive() ) {
 // ...
 if ( job == null || ( job != null && !job.isActive() ) ) {
  try {
   // Discard the log lines of the previous run.
   if ( job != null ) {
    KettleLogStore.discardLines( job.getLogChannelId(), true );
   }
   spoonLoggingObject.setContainerObjectId( spoonObjectId );
   spoonLoggingObject.setLogLevel( executionConfiguration.getLogLevel() );
   job = new Job( spoon.rep, runJobMeta, spoonLoggingObject );
   job.setLogLevel( executionConfiguration.getLogLevel() );
   job.shareVariablesWith( jobMeta );
   job.setInteractive( true );
   job.setGatheringMetrics( executionConfiguration.isGatheringMetrics() );
   job.setArguments( executionConfiguration.getArgumentStrings() );
   job.getExtensionDataMap().putAll( executionConfiguration.getExtensionOptions() );
   job.addJobEntryListener( createRefreshJobEntryListener() );
   // Start from the selected job entry, if any.
   JobEntryCopy startJobEntryCopy =
    runJobMeta.findJobEntry( executionConfiguration.getStartCopyName(), executionConfiguration.getStartCopyNr(), false );
   job.setStartJobEntryCopy( startJobEntryCopy );
   // Pass the configured parameter values to the job.
   Set<String> keys = paramMap.keySet();
   for ( String key : keys ) {
    job.getJobMeta().setParameterValue( key, Const.NVL( paramMap.get( key ), "" ) );
   }
   // ...
  } catch ( KettleException e ) {
   // ...
  }
 }
}

Code example source: pentaho/pentaho-kettle

final Job job = new Job( repository, jobMeta, servletLoggingObject );
job.initializeVariablesFrom( null );
job.getJobMeta().setInternalKettleVariables( job );
job.injectVariables( jobConfiguration.getJobExecutionConfiguration().getVariables() );
job.setArguments( jobExecutionConfiguration.getArgumentStrings() );
// Copy over the declared parameters and their values.
job.copyParametersFrom( jobMeta );
job.clearParameters();
String[] parameterNames = job.listParameters();
for ( int idx = 0; idx < parameterNames.length; idx++ ) {
 // ... look up and set the value of each declared parameter
}
job.setSocketRepository( CarteSingleton.getInstance().getSocketRepository() );
CarteSingleton.getInstance().getJobMap().addJob( job.getJobname(), carteObjectId, job, jobConfiguration );
// Disconnect from the repository once the job has finished.
job.addJobListener( new JobAdapter() {
 public void jobFinished( Job job ) {
  repository.disconnect();
 }
} );

Code example source: pentaho/pentaho-kettle

LogLevel transLogLevel = parentJob.getLogLevel();
String[] args1 = arguments;
if ( args1 == null || args1.length == 0 ) { // No arguments set, look at the parent job.
 args1 = parentJob.getArguments();
}
// ... (execution only continues while !parentJob.isStopped())
try {
 ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.SpoonTransBeforeStart.id, new Object[] {
  executionConfiguration, parentJob.getJobMeta(), transMeta, rep
 } );
 List<Object> items = Arrays.asList( runConfiguration, false );
 ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.RunConfigurationSelection.id, items );
 if ( waitingToFinish && (Boolean) items.get( IS_PENTAHO ) ) {
  String jobName = parentJob.getJobMeta().getName();
  String name = transMeta.getName();
  logBasic( BaseMessages.getString( PKG, "JobTrans.Log.InvalidRunConfigurationCombination", jobName, name ) );
 }
} catch ( Exception e ) {
 // ...
}
remoteSlaveServer = parentJob.getJobMeta().findSlaveServer( realRemoteSlaveServerName );
if ( remoteSlaveServer == null ) {
 // ... throw a KettleException: the remote slave server could not be found
}
// Pass the parent job's batch id down, if the job metadata asks for it.
if ( parentJob.getJobMeta().isBatchIdPassed() ) {
 executionConfiguration.setPassedBatchId( parentJob.getPassedBatchId() );
}

Code example source: pentaho/pentaho-kettle

final Job job = new Job( repository, jobMeta, servletLoggingObject );
job.initializeVariablesFrom( null );
job.getJobMeta().setMetaStore( jobMap.getSlaveServerConfig().getMetaStore() );
job.getJobMeta().setInternalKettleVariables( job );
job.injectVariables( jobConfiguration.getJobExecutionConfiguration().getVariables() );
job.setArguments( jobExecutionConfiguration.getArgumentStrings() );
job.setSocketRepository( getSocketRepository() );
// Start from the selected job entry, if one was specified.
int startCopyNr = jobExecutionConfiguration.getStartCopyNr();
JobEntryCopy startJobEntryCopy = jobMeta.findJobEntry( startCopyName, startCopyNr, false );
job.setStartJobEntryCopy( startJobEntryCopy );
job.addDelegationListener( new CarteDelegationHandler( getTransformationMap(), getJobMap() ) );
// Disconnect from the repository once the job has finished.
job.addJobListener( new JobAdapter() {
 public void jobFinished( Job job ) {
  repository.disconnect();
 }
} );
getJobMap().addJob( job.getJobname(), carteObjectId, job, jobConfiguration );
job.setPassedBatchId( passedBatchId );

Code example source: pentaho/pentaho-kettle

final Job job = new Job( repository, jobMeta, servletLoggingObject );
job.initializeVariablesFrom( null );
job.getJobMeta().setInternalKettleVariables( job );
job.injectVariables( jobConfiguration.getJobExecutionConfiguration().getVariables() );
job.copyParametersFrom( jobMeta );
job.clearParameters();
job.setSocketRepository( getSocketRepository() );
jobMap.addJob( job.getJobname(), carteObjectId, job, jobConfiguration );
// Disconnect from the repository once the job has finished.
job.addJobListener( new JobAdapter() {
 public void jobFinished( Job job ) {
  repository.disconnect();
 }
} );
String message = "Job '" + job.getJobname() + "' was added to the list with id " + carteObjectId;
logBasic( message );
// ...
// If execution failed, surface the job's log in the exception.
String logging = KettleLogStore.getAppender().getBuffer( job.getLogChannelId(), false ).toString();
throw new KettleException( "Error executing Job: " + logging, executionException );

Code example source: pentaho/big-data-plugin

try {
 appender = LogWriter.createFileAppender( logFileName, true, false );
 LogWriter.getInstance().addAppender( appender );
 log.setLogLevel( parentJob.getLogLevel() );
} catch ( Exception e ) {
 // ... logError( BaseMessages.getString( ... ) )
}
// ...
logDetailed( BaseMessages
 .getString( JobEntryHadoopJobExecutor.class, "JobEntryHadoopJobExecutor.Blocking", mainClass ) );
// Poll until the Hadoop job is done or the parent job is stopped.
do {
 // ...
} while ( !parentJob.isStopped() && !done );
if ( !done ) {
 mapReduceJobSimple.killJob();
}
// ...
if ( parentJob.isStopped() && !mapReduceJobAdvanced.isComplete() ) {
 // ...
}
// Attach the log file to the job result.
ResultFile resultFile =
 new ResultFile( ResultFile.FILE_TYPE_LOG, appender.getFile(), parentJob.getJobname(), getName() );
result.getResultFiles().put( resultFile.getFile().toString(), resultFile );

Code example source: pentaho/pentaho-kettle

@Test
public void testRunWithException() throws Exception {
 when( mockJob.isStopped() ).thenReturn( false );
 when( mockJob.getParentJob() ).thenReturn( parentJob );
 when( mockJob.getJobMeta() ).thenReturn( mockJobMeta );
 when( parentJob.isStopped() ).thenReturn( false );
 doThrow( KettleException.class ).when( mockJob ).execute( anyInt(), any( Result.class ) );
 jobRunner.run();
 verify( mockResult, times( 1 ) ).setNrErrors( Mockito.anyInt() );
 //[PDI-14981] catch more general exception to prevent thread hanging
 doThrow( Exception.class ).when( mockJob ).fireJobFinishListeners();
 jobRunner.run();
}

Code example source: pentaho/pentaho-kettle

public void run() {
 try {
  if ( job.isStopped() || ( job.getParentJob() != null && job.getParentJob().isStopped() ) ) {
   return;
  }
  job.fireJobStartListeners(); // Fire the start listeners
  result = job.execute( entryNr + 1, result );
 } catch ( KettleException e ) {
  e.printStackTrace();
 } finally {
  job.setResult( result );
  try {
   ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.JobFinish.id, getJob() );
   job.getJobMeta().disposeEmbeddedMetastoreProvider();
   log.logDebug( BaseMessages.getString( PKG, "Job.Log.DisposeEmbeddedMetastore" ) );
   job.fireJobFinishListeners();
  } catch ( KettleException e ) {
   result.setNrErrors( 1 );
   log.logError( BaseMessages.getString( PKG, "Job.Log.ErrorExecJob", e.getMessage() ), e );
  }
  job.setFinished( true );
 }
}

Code example source: pentaho/pentaho-kettle

data.executorJob.shareVariablesWith( data.executorJobMeta );
data.executorJob.setParentTrans( getTrans() );
data.executorJob.setLogLevel( getLogLevel() );
data.executorJob.setInternalKettleVariables( this );
data.executorJob.copyParametersFrom( data.executorJobMeta );
data.executorJob.setArguments( getTrans().getArguments() );
data.executorJob.setSourceRows( data.groupBuffer );
data.executorJob.beginProcessing();
try {
 result = data.executorJob.execute( 0, result );
} catch ( KettleException e ) {
 log.logError( "An error occurred executing the job: ", e );
 result.setNrErrors( 1 );
} finally {
 try {
  ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.JobFinish.id, data.executorJob );
  getExecutorJob().getJobMeta().disposeEmbeddedMetastoreProvider();
  log.logDebug( BaseMessages.getString( PKG, "JobExecutor.Log.DisposeEmbeddedMetastore" ) );
  data.executorJob.fireJobFinishListeners();
 } catch ( KettleException e ) {
  result.setNrErrors( 1 );
 }
}
// Write the captured log text and channel id to the output row.
String channelId = data.executorJob.getLogChannelId();
String logText = KettleLogStore.getAppender().getBuffer( channelId, false ).toString();
outputRow[idx++] = logText;
outputRow[idx++] = data.executorJob.getLogChannelId();

Code example source: pentaho/pentaho-kettle

TopLevelResource topLevelResource =
 ResourceUtil.serializeResourceExportInterface( exportRepo, job.getJobMeta(), job, repository, getMetaStore() );
String launchFile = topLevelResource.getResourceName();
String message = ResourceUtil.getExplanation( exportRepo, launchFile, job.getJobMeta() );
System.out.println();
System.out.println( message );
// ...
job.setArguments( arguments );
job.initializeVariablesFrom( null );
job.setLogLevel( getLog().getLogLevel() );
job.getJobMeta().setInternalKettleVariables( job );
job.setRepository( repository );
job.getJobMeta().setRepository( repository );
job.getJobMeta().setMetaStore( getMetaStore() );
// Resolve the declared parameters against the values supplied by the caller.
String[] jobParams = job.getJobMeta().listParameters();
for ( String param : jobParams ) {
 String value = params.getParameterValue( param );
 if ( value != null ) {
  job.getJobMeta().setParameterValue( param, value );
 }
}
job.copyParametersFrom( job.getJobMeta() );
job.activateParameters();
// Pass any custom options along in the extension data map.
String optionValue = customParams.getParameterValue( optionName );
if ( optionName != null && optionValue != null ) {
 job.getExtensionDataMap().put( optionName, optionValue );
}
job.start(); // Execute the selected job.

Code example source: pentaho/pentaho-kettle

when( mockHttpServletResponse.getOutputStream() ).thenReturn( outMock );
when( mockJobMap.findJob( id ) ).thenReturn( mockJob );
PowerMockito.when( mockJob.getJobname() ).thenReturn( ServletTestUtils.BAD_STRING_TO_TEST );
PowerMockito.when( mockJob.getLogChannel() ).thenReturn( mockLogChannelInterface );
PowerMockito.when( mockJob.getJobMeta() ).thenReturn( mockJobMeta );
PowerMockito.when( mockJob.isFinished() ).thenReturn( true );
PowerMockito.when( mockJob.getLogChannelId() ).thenReturn( logId );
PowerMockito.when( mockJobMeta.getMaximum() ).thenReturn( new Point( 10, 10 ) );
when( mockJob.getStatus() ).thenReturn( "Finished" );
// ... call the servlet under test, then verify:
verify( mockJob, times( 1 ) ).getLogChannel();

Code example source: pentaho/pentaho-kettle

@Before
public void setUp() throws Exception {
 job = new Job( null, new JobMeta() );
 entry = new JobEntryFolderIsEmpty();
 job.getJobMeta().addJobEntry( new JobEntryCopy( entry ) );
 entry.setParentJob( job );
 JobMeta mockJobMeta = mock( JobMeta.class );
 entry.setParentJobMeta( mockJobMeta );
 job.setStopped( false );
 File dir = Files.createTempDirectory( "dir", new FileAttribute<?>[0] ).toFile();
 dir.deleteOnExit();
 emptyDir = dir.getPath();
 dir = Files.createTempDirectory( "dir", new FileAttribute<?>[0] ).toFile();
 dir.deleteOnExit();
 nonEmptyDir = dir.getPath();
 File file = File.createTempFile( "existingFile", "ext", dir );
 file.deleteOnExit();
}

Code example source: pentaho/pentaho-kettle

@Test
 @PrepareForTest( { Encode.class } )
 public void testStopJobServletEscapesHtmlWhenTransFound() throws ServletException, IOException {
  KettleLogStore.init();
  HttpServletRequest mockHttpServletRequest = mock( HttpServletRequest.class );
  HttpServletResponse mockHttpServletResponse = mock( HttpServletResponse.class );
  Job mockJob = mock( Job.class );
  JobMeta mockJobMeta = mock( JobMeta.class );
  LogChannelInterface mockLogChannelInterface = mock( LogChannelInterface.class );
  mockJob.setName( ServletTestUtils.BAD_STRING_TO_TEST );
  StringWriter out = new StringWriter();
  PrintWriter printWriter = new PrintWriter( out );

  PowerMockito.spy( Encode.class );
  when( mockHttpServletRequest.getContextPath() ).thenReturn( StopJobServlet.CONTEXT_PATH );
  when( mockHttpServletRequest.getParameter( anyString() ) ).thenReturn( ServletTestUtils.BAD_STRING_TO_TEST );
  when( mockHttpServletResponse.getWriter() ).thenReturn( printWriter );
  when( mockJobMap.getJob( any( CarteObjectEntry.class ) ) ).thenReturn( mockJob );
  when( mockJob.getLogChannelId() ).thenReturn( ServletTestUtils.BAD_STRING_TO_TEST );
  when( mockJob.getLogChannel() ).thenReturn( mockLogChannelInterface );
  when( mockJob.getJobMeta() ).thenReturn( mockJobMeta );
  when( mockJobMeta.getMaximum() ).thenReturn( new Point( 10, 10 ) );

  stopJobServlet.doGet( mockHttpServletRequest, mockHttpServletResponse );
  assertFalse( ServletTestUtils.hasBadText( ServletTestUtils.getInsideOfTag( "H1", out.toString() ) ) );

  PowerMockito.verifyStatic( atLeastOnce() );
  Encode.forHtml( anyString() );
 }
