本文整理了Java中org.pentaho.di.core.database.Database.prepareSQL()方法的一些代码示例,展示了Database.prepareSQL()的具体用法。这些代码示例主要来源于Github/Stackoverflow/Maven等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。Database.prepareSQL()方法的具体详情如下:
包路径:org.pentaho.di.core.database.Database
类名称:Database
方法名:prepareSQL
[英]Prepare a statement to be executed on the database. (does not return generated keys)
[中]准备要在数据库上执行的语句。(不返回生成的密钥)
代码示例来源:origin: pentaho/pentaho-kettle
/**
 * Creates a PreparedStatement for the given SQL without asking the JDBC
 * driver to return generated keys.
 *
 * @param sql the SQL text to prepare
 * @return the prepared statement, ready for parameter binding
 * @throws KettleDatabaseException if the statement cannot be prepared
 */
public PreparedStatement prepareSQL( String sql ) throws KettleDatabaseException {
  // Delegate to the two-argument overload; 'false' = do not fetch generated keys.
  final boolean returnGeneratedKeys = false;
  return prepareSQL( sql, returnGeneratedKeys );
}
代码示例来源:origin: pentaho/pentaho-kettle
/**
 * Returns the cached PreparedStatement for the given SQL, preparing and
 * caching a new one on first use.
 *
 * @param sql the SQL text to look up or prepare
 * @return the shared prepared statement for this SQL
 * @throws KettleDatabaseException if preparing a new statement fails
 */
private PreparedStatement getPreparedStatement( String sql ) throws KettleDatabaseException {
  PreparedStatement ps = sqlMap.get( sql );
  if ( ps == null ) {
    ps = database.prepareSQL( sql );
    // FIX: honor the result of putIfAbsent. If a concurrent caller won the
    // race, return its cached statement instead of our fresh one, so every
    // caller shares the single cached instance (the original ignored the
    // return value and handed the loser an un-cached duplicate statement).
    PreparedStatement existing = sqlMap.putIfAbsent( sql, ps );
    if ( existing != null ) {
      // NOTE(review): the losing statement is abandoned here, as before;
      // consider closing it if statement leaks become a concern.
      ps = existing;
    }
  }
  return ps;
}
代码示例来源:origin: pentaho/pentaho-kettle
/**
 * Prepares the lookup statement that fetches a job attribute's string and
 * numeric value by (id_job, code, nr); the statement is stored in
 * psJobAttributesLookup for repeated parameterized execution.
 *
 * @throws KettleException if the statement cannot be prepared
 */
public synchronized void setLookupJobAttribute() throws KettleException {
  String sql =
    "SELECT "
      + quote( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_VALUE_STR ) + ", "
      + quote( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_VALUE_NUM ) + " FROM "
      + databaseMeta.getQuotedSchemaTableCombination( null, KettleDatabaseRepository.TABLE_R_JOB_ATTRIBUTE )
      + " WHERE " + quote( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_ID_JOB ) + " = ? AND "
      + quote( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_CODE ) + " = ? AND "
      // FIX: quote the NR column like every other identifier in this query
      // (matches setLookupStepAttribute); an unquoted identifier can break on
      // databases with reserved words or forced identifier casing.
      + quote( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_NR ) + " = ? ";
  psJobAttributesLookup = database.prepareSQL( sql );
}
代码示例来源:origin: pentaho/pentaho-kettle
/**
 * Prepares the lookup statement that fetches a transformation attribute's
 * string and numeric value by (id_transformation, code, nr); the statement
 * is stored in psTransAttributesLookup for repeated parameterized execution.
 *
 * @throws KettleException if the statement cannot be prepared
 */
public synchronized void setLookupTransAttribute() throws KettleException {
  String sql =
    "SELECT "
      + quote( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_VALUE_STR )
      + ", "
      + quote( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_VALUE_NUM )
      + " FROM "
      + databaseMeta
        .getQuotedSchemaTableCombination( null, KettleDatabaseRepository.TABLE_R_TRANS_ATTRIBUTE )
      + " WHERE " + quote( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_ID_TRANSFORMATION ) + " = ? AND "
      + quote( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_CODE ) + " = ? AND "
      // FIX: quote the NR column like every other identifier in this query
      // (matches setLookupStepAttribute); an unquoted identifier can break on
      // databases with reserved words or forced identifier casing.
      + quote( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_NR ) + " = ? ";
  psTransAttributesLookup = database.prepareSQL( sql );
}
代码示例来源:origin: pentaho/pentaho-kettle
/**
 * Prepares the lookup statement that fetches a step attribute's string and
 * numeric value by (id_step, code, nr); the statement is stored in
 * psStepAttributesLookup for repeated parameterized execution.
 *
 * @throws KettleException if the statement cannot be prepared
 */
public synchronized void setLookupStepAttribute() throws KettleException {
  // Assemble the parameterized SELECT piece by piece for readability.
  String table =
    databaseMeta.getQuotedSchemaTableCombination( null, KettleDatabaseRepository.TABLE_R_STEP_ATTRIBUTE );
  String sql = "SELECT "
    + quote( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_VALUE_STR ) + ", "
    + quote( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_VALUE_NUM )
    + " FROM " + table
    + " WHERE " + quote( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_ID_STEP ) + " = ? AND "
    + quote( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_CODE ) + " = ? AND "
    + quote( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_NR ) + " = ? ";
  psStepAttributesLookup = database.prepareSQL( sql );
}
代码示例来源:origin: pentaho/pentaho-kettle
/**
 * Prepares an INSERT statement for the given table using the layout of the
 * supplied row metadata; the resulting statement is stored in
 * prepStatementInsert for later value binding.
 *
 * @param rowMeta    metadata describing the columns to insert
 * @param schemaName name of the schema containing the target table
 * @param tableName  name of the target table
 * @throws KettleDatabaseException if the row has no fields or preparation fails
 */
public void prepareInsert( RowMetaInterface rowMeta, String schemaName, String tableName )
  throws KettleDatabaseException {
  // Guard: an INSERT with zero columns is meaningless.
  if ( rowMeta.size() == 0 ) {
    throw new KettleDatabaseException( "No fields in row, can't insert!" );
  }
  String insertSql = getInsertStatement( schemaName, tableName, rowMeta );
  if ( log.isDetailed() ) {
    log.logDetailed( "Preparing statement: " + Const.CR + insertSql );
  }
  prepStatementInsert = prepareSQL( insertSql );
}
代码示例来源:origin: pentaho/pentaho-kettle
/**
 * Prepares the lookup statement that fetches a job-entry attribute's string
 * and numeric value by (id_jobentry, code, nr); the statement is stored in
 * pstmt_entry_attributes for repeated parameterized execution.
 *
 * @throws KettleException if the statement cannot be prepared
 */
public synchronized void setLookupJobEntryAttribute() throws KettleException {
  // Assemble the parameterized SELECT piece by piece for readability.
  String table = databaseMeta.getQuotedSchemaTableCombination(
    null, KettleDatabaseRepository.TABLE_R_JOBENTRY_ATTRIBUTE );
  String sql = "SELECT "
    + quote( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_VALUE_STR )
    + ", "
    + quote( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_VALUE_NUM )
    + " FROM " + table + " WHERE "
    + quote( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_ID_JOBENTRY ) + " = ? AND "
    + quote( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_CODE ) + " = ? AND "
    + quote( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_NR ) + " = ? ";
  pstmt_entry_attributes = database.prepareSQL( sql );
}
代码示例来源:origin: pentaho/pentaho-kettle
/**
 * Executes the dialect-specific auto-increment SQL and returns the key the
 * driver generated for it.
 *
 * @param autoIncSQL the auto-increment statement to execute
 * @param dbm        database metadata (unused here, kept for interface compatibility)
 * @param ldb        the database connection to execute against
 * @return the generated batch id
 * @throws KettleDatabaseException if execution fails or no key was generated
 */
public Long getNextBatchIdUsingAutoIncSQL( String autoIncSQL, DatabaseMeta dbm, Database ldb ) throws KettleDatabaseException {
  // Prepare with generated-key retrieval enabled.
  PreparedStatement stmt = ldb.prepareSQL( autoIncSQL, true );
  try {
    stmt.executeUpdate();
    RowMetaAndData keys = ldb.getGeneratedKeys( stmt );
    if ( keys.getRowMeta().size() == 0 ) {
      throw new KettleDatabaseException( "Unable to retrieve value of auto-generated technical key : "
        + "no value found!" );
    }
    return keys.getRowMeta().getInteger( keys.getData(), 0 );
  } catch ( KettleValueException kve ) {
    throw new KettleDatabaseException( kve );
  } catch ( SQLException sqlex ) {
    throw new KettleDatabaseException( sqlex );
  } finally {
    try {
      stmt.close();
    } catch ( SQLException ignored ) {
      // Best effort: a failed close must not mask the real outcome.
    }
  }
}
代码示例来源:origin: pentaho/pentaho-kettle
PreparedStatement ps = data.db.prepareSQL( sql );
代码示例来源:origin: pentaho/pentaho-kettle
/**
 * Inserts one transformation attribute row (R_TRANS_ATTRIBUTE) and returns
 * the generated attribute id. The insert statement is prepared once and
 * reused for subsequent calls.
 *
 * @param id_transformation id of the owning transformation
 * @param nr                attribute sequence number
 * @param code              attribute code (key)
 * @param value_num         numeric value of the attribute
 * @param value_str         string value of the attribute
 * @return the ObjectId of the inserted attribute row
 * @throws KettleException if preparing or executing the insert fails
 */
public synchronized ObjectId insertTransAttribute( ObjectId id_transformation, long nr, String code,
  long value_num, String value_str ) throws KettleException {
  ObjectId id = getNextTransAttributeID();
  RowMetaAndData table = new RowMetaAndData();
  table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_ID_TRANS_ATTRIBUTE ), id );
  table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_ID_TRANSFORMATION ),
    id_transformation );
  // Long.valueOf replaces the deprecated Long(long) constructor (Java 9+); may reuse cached instances.
  table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_NR ), Long.valueOf( nr ) );
  table.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_CODE ), code );
  table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_VALUE_NUM ),
    Long.valueOf( value_num ) );
  table.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_VALUE_STR ), value_str );
  /*
   * If we have prepared the insert, we don't do it again. We assume that all the step insert statements come one
   * after the other.
   */
  if ( psTransAttributesInsert == null ) {
    String sql =
      database.getInsertStatement( KettleDatabaseRepository.TABLE_R_TRANS_ATTRIBUTE, table.getRowMeta() );
    psTransAttributesInsert = database.prepareSQL( sql );
  }
  database.setValues( table, psTransAttributesInsert );
  database.insertRow( psTransAttributesInsert, useBatchProcessing );
  if ( log.isDebug() ) {
    log.logDebug( "saved transformation attribute [" + code + "]" );
  }
  return id;
}
代码示例来源:origin: pentaho/pentaho-kettle
/**
 * Inserts one job attribute row (R_JOB_ATTRIBUTE) and returns the generated
 * attribute id. The insert statement is prepared once and reused for
 * subsequent calls.
 *
 * @param id_job    id of the owning job
 * @param nr        attribute sequence number
 * @param code      attribute code (key)
 * @param value_num numeric value of the attribute
 * @param value_str string value of the attribute
 * @return the ObjectId of the inserted attribute row
 * @throws KettleException if preparing or executing the insert fails
 */
public synchronized ObjectId insertJobAttribute( ObjectId id_job, long nr, String code, long value_num,
  String value_str ) throws KettleException {
  ObjectId id = getNextJobAttributeID();
  RowMetaAndData table = new RowMetaAndData();
  table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_ID_JOB_ATTRIBUTE ), id );
  table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_ID_JOB ), id_job );
  // Long.valueOf replaces the deprecated Long(long) constructor (Java 9+); may reuse cached instances.
  table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_NR ), Long.valueOf( nr ) );
  table.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_CODE ), code );
  table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_VALUE_NUM ),
    Long.valueOf( value_num ) );
  table.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_VALUE_STR ), value_str );
  /*
   * If we have prepared the insert, we don't do it again. We assume that all the step insert statements come one
   * after the other.
   */
  if ( psJobAttributesInsert == null ) {
    String sql =
      database.getInsertStatement( KettleDatabaseRepository.TABLE_R_JOB_ATTRIBUTE, table.getRowMeta() );
    psJobAttributesInsert = database.prepareSQL( sql );
  }
  database.setValues( table, psJobAttributesInsert );
  database.insertRow( psJobAttributesInsert, useBatchProcessing );
  if ( log.isDebug() ) {
    log.logDebug( "saved job attribute [" + code + "]" );
  }
  return id;
}
代码示例来源:origin: pentaho/pentaho-kettle
/**
 * Inserts one step attribute row (R_STEP_ATTRIBUTE) and returns the
 * generated attribute id. The insert statement is prepared once and reused
 * for subsequent calls.
 *
 * @param id_transformation id of the owning transformation
 * @param id_step           id of the owning step
 * @param nr                attribute sequence number
 * @param code              attribute code (key)
 * @param value_num         numeric value of the attribute
 * @param value_str         string value of the attribute
 * @return the ObjectId of the inserted attribute row
 * @throws KettleException if preparing or executing the insert fails
 */
public synchronized ObjectId insertStepAttribute( ObjectId id_transformation, ObjectId id_step, long nr,
  String code, double value_num, String value_str )
  throws KettleException {
  ObjectId id = getNextStepAttributeID();
  RowMetaAndData table = new RowMetaAndData();
  table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_ID_STEP_ATTRIBUTE ), id );
  table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_ID_TRANSFORMATION ),
    id_transformation );
  table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_ID_STEP ), id_step );
  // valueOf replaces the deprecated Long(long)/Double(double) constructors (Java 9+).
  table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_NR ), Long.valueOf( nr ) );
  table.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_CODE ), code );
  table.addValue( new ValueMetaNumber( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_VALUE_NUM ),
    Double.valueOf( value_num ) );
  table.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_VALUE_STR ), value_str );
  /*
   * If we have prepared the insert, we don't do it again. We assume that all the step insert statements come one
   * after the other.
   */
  if ( psStepAttributesInsert == null ) {
    String sql =
      database.getInsertStatement( KettleDatabaseRepository.TABLE_R_STEP_ATTRIBUTE, table.getRowMeta() );
    psStepAttributesInsert = database.prepareSQL( sql );
  }
  database.setValues( table, psStepAttributesInsert );
  database.insertRow( psStepAttributesInsert, useBatchProcessing );
  if ( log.isDebug() ) {
    log.logDebug( "saved attribute [" + code + "]" );
  }
  return id;
}
代码示例来源:origin: pentaho/pentaho-kettle
data.insertStatement = data.db.prepareSQL( sql );
data.preparedStatements.put( data.realSchemaTable + "insert", data.insertStatement );
data.lookupStatement = data.db.prepareSQL( sql );
data.preparedStatements.put( data.realSchemaTable + "lookup", data.lookupStatement );
String sql = getUpdateStatement( data.inputRowMeta );
data.updateStatement = data.db.prepareSQL( sql );
data.preparedStatements.put( data.realSchemaTable + "update", data.updateStatement );
if ( log.isDebug() ) {
data.deleteStatement = data.db.prepareSQL( sql );
data.preparedStatements.put( data.realSchemaTable + "delete", data.deleteStatement );
if ( log.isDebug() ) {
代码示例来源:origin: pentaho/pentaho-kettle
logDetailed( "Prepared statement : " + sql );
insertStatement = data.db.prepareSQL( sql, meta.isReturningGeneratedKeys() );
data.preparedStatements.put( tableName, insertStatement );
代码示例来源:origin: pentaho/pentaho-kettle
PreparedStatement ps = db.prepareSQL( FILEBulkFile );
ps.execute();
代码示例来源:origin: pentaho/pentaho-kettle
data.lookupStatement = data.db.prepareSQL( sql );
data.preparedStatements.put( data.realSchemaTable + "lookup", data.lookupStatement );
data.insertStatement = data.db.prepareSQL( sql );
data.preparedStatements.put( data.realSchemaTable + "insert", data.insertStatement );
String sql = getUpdateStatement( data.inputRowMeta );
data.updateStatement = data.db.prepareSQL( sql );
data.preparedStatements.put( data.realSchemaTable + "update", data.updateStatement );
if ( log.isDebug() ) {
String sql = getDeleteStatement( data.inputRowMeta );
data.deleteStatement = data.db.prepareSQL( sql );
data.preparedStatements.put( data.realSchemaTable + "delete", data.deleteStatement );
if ( log.isDebug() ) {
代码示例来源:origin: pentaho/pentaho-kettle
data.pstmt = data.db.prepareSQL( sql );
if ( log.isDebug() ) {
logDebug( BaseMessages.getString( PKG, "DatabaseJoin.Log.SQLStatement", sql ) );
内容来源于网络,如有侵权,请联系作者删除!