blob: a9eb08320554f40b29f882508fbbc26f753dd06f [file] [log] [blame]
/*******************************************************************************
* Copyright (c) 2008 empolis GmbH and brox IT Solutions GmbH. All rights reserved. This program and the accompanying
* materials are made available under the terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*
* Contributors: Thomas Menzel (brox IT Solution GmbH) - initial creator
*******************************************************************************/
package org.eclipse.smila.processing.worker.test;
import java.util.Collection;
import org.eclipse.smila.datamodel.AnyMap;
import org.eclipse.smila.datamodel.AnySeq;
import org.eclipse.smila.datamodel.DataFactory;
import org.eclipse.smila.datamodel.Record;
import org.eclipse.smila.objectstore.StoreObject;
import org.eclipse.smila.processing.worker.PipelineProcessorWorker;
import org.eclipse.smila.taskmanager.BulkInfo;
import org.eclipse.smila.taskworker.input.RecordInput;
/**
* more tests for the pipeline processor worker.
*/
public class TestPipelineProcessorWorker2 extends ProcessingWorkerTestBase {

  /** Name of the BPEL pipeline under test; it contains two instances of the same pipelet. */
  private static final String BPEL_TEST_PIPELINE = "TestPipeline3";

  /**
   * Creates the test job: a workflow run with pipeline-run bulk size 5 using {@link #BPEL_TEST_PIPELINE}.
   *
   * @param workflowName name of the workflow the job is attached to
   * @param jobName name of the job to create
   * @throws Exception if the job cannot be added
   */
  @Override
  protected void createJob(final String workflowName, final String jobName) throws Exception {
    final AnyMap parameters = DataFactory.DEFAULT.createAnyMap();
    parameters.put(PipelineProcessorWorker.KEY_PIPELINERUN_BULKSIZE, 5);
    parameters.put(PipelineProcessorWorker.KEY_PIPELINE_NAME, BPEL_TEST_PIPELINE);
    parameters.put("tempStore", TEMP_STORE);
    parameters.put("store", STORE);
    addJob(jobName, workflowName, parameters);
  }

  /**
   * Tests that with 2 instances of the same pipelet within the same pipeline, the configs don't get
   * mixed up.
   *
   * <h4>How this test works</h4>
   *
   * TestPipeline3 contains 2 instances of {@link BpelWorkerTestPipelet3}. Each instance appends its
   * sole config value contained in "param" to a seq named "bpel order" in the record. The test
   * checks that the values are added with correct values and in proper order.
   *
   * @throws Exception on any unexpected test failure
   */
  public void test_2IntancesOfSamePipeletInSamePipelineWithDiffConfig() throws Exception {
    final String workflowName = "testWithOutputBucket";
    final int noOfRecords = 10;
    executeJob(workflowName, noOfRecords);
    // all records are expected to end up in a single bulk object in the output bucket
    final Collection<StoreObject> storeObjects = _objectStoreService.getStoreObjectInfos(STORE, BUCKET_NAME);
    assertEquals(1, storeObjects.size());
    final BulkInfo bulkInfo = new BulkInfo(BUCKET_NAME, STORE, storeObjects.iterator().next().getId());
    final RecordInput recordInput = new RecordInput(bulkInfo, _objectStoreService);
    try {
      int recordCount = 0;
      for (Record record = recordInput.getRecord(); record != null; record = recordInput.getRecord()) {
        recordCount++;
        final AnySeq order = record.getMetadata().getSeq("bpel order");
        // each pipelet instance must have appended its own config value, in pipeline order
        assertEquals("bpel pos 1", order.getStringValue(0));
        assertEquals("bpel pos 2", order.getStringValue(1));
      }
      // guard against a vacuous pass: the loop asserts nothing if the bulk is empty
      assertTrue("expected at least one record in the result bulk", recordCount > 0);
    } finally {
      recordInput.close();
      _objectStoreService.removeObject(STORE, bulkInfo.getObjectName());
    }
  }
}