Skip to content

Commit

Permalink
[TEST] failing tests
Browse files Browse the repository at this point in the history
LookupReferencesExceptionTest.testObjectTypePairsToString,
JsonOutputTest.testNonCompatibilityMode depended on map order
JobMetaTest.testLookupRepositoryReferences was rigged to fail, inferring
test from impl
XsdValidatorIntTest was failing in tests-trans; moved its test resource files and
parameterized the source/target options of the tests-trans javac task
  • Loading branch information
tgf committed Oct 8, 2015
1 parent 71a6dfa commit 32d8256
Show file tree
Hide file tree
Showing 7 changed files with 87 additions and 101 deletions.
4 changes: 2 additions & 2 deletions build.xml
Original file line number Diff line number Diff line change
Expand Up @@ -582,8 +582,8 @@
<javac fork="true"
srcdir="${test}"
destdir="${testClasses}"
target="7"
source="6"
target="${javac.target}"
source="${javac.source}"
deprecation="true"
debug="true"
includeantruntime="false"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@
import org.junit.Test;
import org.pentaho.di.repository.RepositoryObjectType;

import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;

import static org.junit.Assert.assertEquals;
Expand All @@ -42,11 +42,11 @@ public class LookupReferencesExceptionTest {
public void testObjectTypePairsToString() throws Exception {
Exception cause = new NullPointerException();

Map<String, RepositoryObjectType> notFoundedReferences = new HashMap<String, RepositoryObjectType>();
String pathToJobStub = "/path/Job.ktr";
Map<String, RepositoryObjectType> notFoundedReferences = new LinkedHashMap<String, RepositoryObjectType>();
String pathToTransStub = "/path/Trans.ktr";
notFoundedReferences.put( pathToJobStub, RepositoryObjectType.JOB );
String pathToJobStub = "/path/Job.ktr";
notFoundedReferences.put( pathToTransStub, RepositoryObjectType.TRANSFORMATION );
notFoundedReferences.put( pathToJobStub, RepositoryObjectType.JOB );

String expectedOutput =
System.lineSeparator() + "\"/path/Trans.ktr\" [transformation] " + System.lineSeparator()
Expand Down
19 changes: 15 additions & 4 deletions engine/test-src/org/pentaho/di/job/JobMetaTest.java
Original file line number Diff line number Diff line change
Expand Up @@ -25,11 +25,12 @@
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.pentaho.di.core.exception.IdNotFoundException;
import org.pentaho.di.core.exception.KettleXMLException;
import org.pentaho.di.core.exception.LookupReferencesException;
import org.pentaho.di.core.listeners.ContentChangedListener;
import org.pentaho.di.job.entries.empty.JobEntryEmpty;
import org.pentaho.di.job.entries.trans.JobEntryTrans;
import org.pentaho.di.job.entry.JobEntryBase;
import org.pentaho.di.job.entry.JobEntryCopy;
import org.pentaho.di.repository.Repository;

Expand Down Expand Up @@ -106,7 +107,8 @@ public void testContentChangeListener() throws Exception {
verifyNoMoreInteractions( listener );
}

@Test public void testLookupRepositoryReferences() throws Exception {
@Test
public void testLookupRepositoryReferences() throws Exception {
JobMeta jobMetaMock = mock( JobMeta.class );
doCallRealMethod().when( jobMetaMock ).lookupRepositoryReferences( any( Repository.class) );
doCallRealMethod().when( jobMetaMock ).addJobEntry( anyInt(), any( JobEntryCopy.class ) );
Expand All @@ -119,7 +121,8 @@ public void testContentChangeListener() throws Exception {

JobEntryTrans brokenJobEntryMock = mock(JobEntryTrans.class);
when(brokenJobEntryMock.hasRepositoryReferences()).thenReturn( true );
doThrow( new RuntimeException() ).when( brokenJobEntryMock ).lookupRepositoryReferences( any( Repository.class ) );
doThrow( mock( IdNotFoundException.class ) )
.when( brokenJobEntryMock ).lookupRepositoryReferences( any( Repository.class ) );

JobEntryCopy jobEntryCopy1 = mock( JobEntryCopy.class );
when( jobEntryCopy1.getEntry() ).thenReturn( jobEntryMock );
Expand All @@ -133,7 +136,15 @@ public void testContentChangeListener() throws Exception {
when( jobEntryCopy3.getEntry() ).thenReturn( jobEntryMock );
jobMetaMock.addJobEntry( 2, jobEntryCopy3 );

jobMetaMock.lookupRepositoryReferences( any( Repository.class ) );
Repository repo = mock( Repository.class );
try {
jobMetaMock.lookupRepositoryReferences( repo );
Assert.fail( "no exception for broken entry" );
} catch ( LookupReferencesException e ) {
// ok
}

verify( jobEntryMock, times(2) ).lookupRepositoryReferences( any( Repository.class) );

}
}
24 changes: 18 additions & 6 deletions test/org/pentaho/di/trans/steps/jsonoutput/JsonOutputTest.java
Original file line number Diff line number Diff line change
Expand Up @@ -29,8 +29,9 @@
import junit.framework.TestCase;

import org.apache.commons.io.FileUtils;
import org.codehaus.jackson.JsonNode;
import org.codehaus.jackson.map.ObjectMapper;
import org.junit.Assert;
import org.mockito.Mockito;
import org.pentaho.di.TestUtilities;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.KettleEnvironment;
Expand All @@ -40,8 +41,9 @@
import org.pentaho.di.core.plugins.StepPluginType;
import org.pentaho.di.core.row.RowMeta;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMeta;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.core.row.value.ValueMetaInteger;
import org.pentaho.di.core.row.value.ValueMetaString;
import org.pentaho.di.trans.RowStepCollector;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransHopMeta;
Expand Down Expand Up @@ -177,7 +179,7 @@ public List<RowMetaAndData> createResultData1() {
public RowMetaInterface createRowMetaInterface() {
RowMetaInterface rowMetaInterface = new RowMeta();

ValueMetaInterface[] valuesMeta = { new ValueMeta( "filename", ValueMeta.TYPE_STRING ), };
ValueMetaInterface[] valuesMeta = { new ValueMetaString( "filename" ), };
for ( int i = 0; i < valuesMeta.length; i++ ) {
rowMetaInterface.addValueMeta( valuesMeta[i] );
}
Expand Down Expand Up @@ -210,8 +212,8 @@ public RowMetaInterface createResultRowMetaInterface() {

ValueMetaInterface[] valuesMeta =
{
new ValueMeta( "Id", ValueMeta.TYPE_INTEGER ), new ValueMeta( "State", ValueMeta.TYPE_STRING ),
new ValueMeta( "City", ValueMeta.TYPE_STRING ) };
new ValueMetaInteger( "Id" ), new ValueMetaString( "State" ),
new ValueMetaString( "City" ) };

for ( int i = 0; i < valuesMeta.length; i++ ) {
rowMetaInterface.addValueMeta( valuesMeta[i] );
Expand Down Expand Up @@ -334,7 +336,7 @@ public String test( boolean compatibilityMode ) throws Exception {

public void testNonCompatibilityMode() throws Exception {
String jsonStructure = test( false );
Assert.assertEquals( EXPECTED_NON_COMPATIBILITY_JSON, jsonStructure );
Assert.assertTrue( jsonEquals( EXPECTED_NON_COMPATIBILITY_JSON, jsonStructure ) );
}

public void testCompatibilityMode() throws Exception {
Expand All @@ -359,4 +361,14 @@ public void testNpeIsNotThrownOnNullInput() throws Exception {

step.processRow( mockHelper.processRowsStepMetaInterface, mockHelper.processRowsStepDataInterface );
}

/**
* compare json (deep equals ignoring order)
*/
/**
 * Deep-compares two JSON documents, ignoring object key order.
 *
 * @param json1 first JSON document
 * @param json2 second JSON document
 * @return true if both documents parse to structurally equal trees
 * @throws Exception if either string is not well-formed JSON
 */
protected boolean jsonEquals( String json1, String json2 ) throws Exception {
  ObjectMapper mapper = new ObjectMapper();
  JsonNode left = mapper.readTree( json1 );
  JsonNode right = mapper.readTree( json2 );
  return left.equals( right );
}
}
133 changes: 48 additions & 85 deletions test/org/pentaho/di/trans/steps/xsdvalidator/XsdValidatorIntTest.java
Original file line number Diff line number Diff line change
Expand Up @@ -25,24 +25,22 @@
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.List;

import org.apache.commons.vfs2.FileObject;
import org.apache.commons.vfs2.FileSystemException;
import org.apache.poi.util.IOUtils;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.core.RowMetaAndData;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleFileException;
import org.pentaho.di.core.exception.KettleValueException;
import org.pentaho.di.core.row.RowMeta;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMetaInterface;
Expand All @@ -54,6 +52,7 @@
public class XsdValidatorIntTest {

private static final String RAMDIR = "ram://" + XsdValidatorIntTest.class.getSimpleName();
private static final String TEST_FILES_DIR = "testfiles/xsdvalidator/";
private static FileObject schemaRamFile = null;
private static FileObject dataRamFile = null;

Expand All @@ -77,92 +76,64 @@ public static void tearDownAfterClass() {
}

@Test
public void testVfsInputFiles() throws FileSystemException {
testVfsFileTypes( getDataRamFile().getURL().toString(), getSchemaRamFile().getURL().toString(), true );
testVfsFileTypes( getDataRamFile().getURL().toString(), getSchemaUrlFile().getURL().toString(), true );
testVfsFileTypes( getDataUrlFile().getURL().toString(), getSchemaRamFile().getURL().toString(), true );
testVfsFileTypes( getDataUrlFile().getURL().toString(), getSchemaUrlFile().getURL().toString(), true );
// Exercises XSD validation across every combination of VFS schemes for the
// data and schema inputs (ram:// vs. local file:// URLs); each combination is
// expected to validate successfully (the trailing 'true' argument).
public void testVfsInputFiles() throws Exception {
// ram:// data + ram:// schema
testVfsFileTypes(
getDataRamFile().getURL().toString(),
getSchemaRamFile().getURL().toString(), true );
// ram:// data + file:// schema
testVfsFileTypes( getDataRamFile().getURL().toString(), getSchemaFileUrl(), true );
// file:// data + ram:// schema
testVfsFileTypes( getDataFileUrl(), getSchemaRamFile().getURL().toString(), true );
// file:// data + file:// schema
testVfsFileTypes( getDataFileUrl(), getSchemaFileUrl(), true );
}

private FileObject getSchemaRamFile() {
try {
if ( schemaRamFile != null && schemaRamFile.exists() && schemaRamFile.getContent().getSize() > 0 ) {
return schemaRamFile;
}
schemaRamFile = KettleVFS.getFileObject( RAMDIR + "/schema.xsd" );
if ( loadRamFile( this.getClass().getResourceAsStream( "schema.xsd" ), schemaRamFile ) ) {
return schemaRamFile;
}
} catch ( Exception e ) {
return null;
private FileObject getSchemaRamFile() throws Exception {
if ( schemaRamFile != null && schemaRamFile.exists() && schemaRamFile.getContent().getSize() > 0 ) {
return schemaRamFile;
}
return null;
schemaRamFile = loadRamFile( "schema.xsd" );
return schemaRamFile;
}

private FileObject getDataRamFile() {
try {
if ( dataRamFile != null && dataRamFile.exists() && dataRamFile.getContent().getSize() > 0 ) {
return dataRamFile;
}
dataRamFile = KettleVFS.getFileObject( RAMDIR + "/data.xml" );
if ( loadRamFile( this.getClass().getResourceAsStream( "data.xml" ), dataRamFile ) ) {
return dataRamFile;
}
} catch ( Exception e ) {
return null;
private FileObject getDataRamFile() throws Exception {
if ( dataRamFile != null && dataRamFile.exists() && dataRamFile.getContent().getSize() > 0 ) {
return dataRamFile;
}
return null;
dataRamFile = loadRamFile( "data.xml" );
return dataRamFile;
}

private FileObject getSchemaUrlFile() {
try {
return KettleVFS.getFileObject( this.getClass().getResource( "schema.xsd" ).toString() );
} catch ( KettleFileException e ) {
return null;
}
/**
 * Resolves a fixture under TEST_FILES_DIR to its file:// URL in external form.
 *
 * @param filename fixture file name relative to TEST_FILES_DIR
 * @return the file's URL as an external-form string
 * @throws Exception if the path cannot be converted to a URL
 */
private String getFileUrl( String filename ) throws Exception {
  return new File( TEST_FILES_DIR + filename ).toURI().toURL().toExternalForm();
}
/**
 * Opens a fixture under TEST_FILES_DIR for reading.
 *
 * @param filename fixture file name relative to TEST_FILES_DIR
 * @return an input stream over the fixture; caller is responsible for closing it
 * @throws Exception if the file does not exist or cannot be opened
 */
private InputStream getFileInputStream( String filename ) throws Exception {
  // FileInputStream(String) is equivalent to wrapping the path in a File first.
  return new FileInputStream( TEST_FILES_DIR + filename );
}

private FileObject getDataUrlFile() {
try {
return KettleVFS.getFileObject( this.getClass().getResource( "data.xml" ).toString() );
} catch ( KettleFileException e ) {
return null;
}
/** @return the file:// URL of the XSD schema fixture */
private String getSchemaFileUrl() throws Exception {
  final String schemaFixture = "schema.xsd";
  return getFileUrl( schemaFixture );
}

private boolean loadRamFile( InputStream sourceStream, FileObject targetFile ) {
if ( sourceStream == null || targetFile == null ) {
return false;
}
boolean result = false;
OutputStream targetStream = null;
try {
targetStream = targetFile.getContent().getOutputStream();
IOUtils.copy( sourceStream, targetStream );
result = true;
} catch ( Exception e ) {
// Ignore, we'll return false anyways
}
try {
sourceStream.close();
if ( targetStream != null ) {
targetStream.close();
/** @return the file:// URL of the XML data fixture */
private String getDataFileUrl() throws Exception {
  final String dataFixture = "data.xml";
  return getFileUrl( dataFixture );
}

private FileObject loadRamFile( String filename ) throws Exception {
String targetUrl = RAMDIR + "/" + filename;
try ( InputStream source = getFileInputStream( filename ) ) {
FileObject fileObject = KettleVFS.getFileObject( targetUrl );
try ( OutputStream targetStream = fileObject.getContent().getOutputStream() ) {
IOUtils.copy( source, targetStream );
}
} catch ( Exception e ) {
// Ignore
return fileObject;
}
return result;
}

private void testVfsFileTypes( String dataFilename, String schemaFilename, boolean expected ) {
private void testVfsFileTypes( String dataFilename, String schemaFilename, boolean expected ) throws Exception {
assertNotNull( dataFilename );
assertNotNull( schemaFilename );
try {
assertTrue( KettleVFS.getFileObject( dataFilename ).exists() );
assertTrue( KettleVFS.getFileObject( schemaFilename ).exists() );
} catch ( Exception e ) {
fail();
}
assertTrue( KettleVFS.getFileObject( dataFilename ).exists() );
assertTrue( KettleVFS.getFileObject( schemaFilename ).exists() );

RowMetaInterface inputRowMeta = new RowMeta();
inputRowMeta.addValueMeta( new ValueMetaString( "DataFile" ) );
Expand All @@ -180,12 +151,8 @@ private void testVfsFileTypes( String dataFilename, String schemaFilename, boole
TransMeta transMeta = TransTestFactory.generateTestTransformation( null, meta, stepName );

List<RowMetaAndData> result = null;
try {
result = TransTestFactory.executeTestTransformation( transMeta, TransTestFactory.INJECTOR_STEPNAME, stepName,
TransTestFactory.DUMMY_STEPNAME, inputData );
} catch ( KettleException e ) {
fail();
}
result = TransTestFactory.executeTestTransformation( transMeta, TransTestFactory.INJECTOR_STEPNAME, stepName,
TransTestFactory.DUMMY_STEPNAME, inputData );

assertNotNull( result );
assertEquals( 1, result.size() );
Expand All @@ -199,12 +166,8 @@ private void testVfsFileTypes( String dataFilename, String schemaFilename, boole
assertEquals( "result", result.get( 0 ).getValueMeta( 2 ).getName() );

// Check result
try {
assertEquals( dataFilename, result.get( 0 ).getString( 0, "default" ) );
assertEquals( schemaFilename, result.get( 0 ).getString( 1, "default" ) );
assertEquals( expected, result.get( 0 ).getBoolean( 2, !expected ) );
} catch ( KettleValueException e ) {
fail();
}
assertEquals( dataFilename, result.get( 0 ).getString( 0, "default" ) );
assertEquals( schemaFilename, result.get( 0 ).getString( 1, "default" ) );
assertEquals( expected, result.get( 0 ).getBoolean( 2, !expected ) );
}
}
File renamed without changes.
File renamed without changes.

0 comments on commit 32d8256

Please sign in to comment.