My Mulesoft flows are being executed more than once - foreach

I'm developing a data migration application between two databases, using Mulesoft's Anypoint Platform.
I keep getting duplicate key errors, and while debugging I realized my flows are being executed more than once.
Here's a part of the code:
<flow name="docLogicFlow" doc:id="7e901506-e31d-4d1a-baf0-e54fef27f083" >
<flow-ref doc:name="DGD_TCONTENT Migration Flow Reference" doc:id="9139e02d-36b2-4c4c-a8bb-12fa1e372134" name="DGD_TCONTENTMigrationFlow" />
<flow-ref doc:name="DOC_TFILECONTENTTYPE Migration Flow Reference" doc:id="ef67a89a-ecd9-416b-9bc4-4a67a91efd67" name="DOC_TFILECONTENTTYPEMigrationFlow" />
<flow-ref doc:name="DOC_TFILESTORAGE Migration Flow Reference" doc:id="2adbb928-9f7e-44be-8a50-33bdceacfc2f" name="DOC_TFILESTORAGEMigrationFlow" />
</flow>
<flow name="docCleanFlow" doc:id="8278a9e1-64c7-4abc-8647-ce22c28f66c5" >
<db:delete doc:name="Delete" doc:id="bc7920fa-ed99-4fe1-9337-a061f5ec800b" config-ref="Database_Config">
<db:sql >DELETE FROM DGD_TCONTENT;
DELETE FROM DOC_TFILE;
DELETE FROM DOC_TFILECONTENTTYPE;
DELETE FROM DOC_TFILESTORAGE;</db:sql>
</db:delete>
</flow>
<flow name="DGD_TCONTENTMigrationFlow" doc:id="cbdcdcb7-a22c-4636-ba02-639f12882700" >
<db:select doc:name="Select DGD_TCONTENT from Oracle DB" doc:id="3b60f901-03f5-4213-8373-92b29abe6a3d" config-ref="MYCAREER_DEV_DB" >
<db:sql >SELECT * FROM DGD_TCONTENT</db:sql>
</db:select>
<batch:job jobName="DGD_TCONTENTMigrationBatchJob" doc:id="3317b3d8-582c-4f6c-82be-2d84ccdbd28f" >
<batch:process-records >
<batch:step name="DGD_TCONTENTMigrationBatchStep" doc:id="a4d82fbe-96c7-4ff8-8fc9-5614f2963e60" >
<batch:aggregator doc:name="DGD_TCONTENT Batch Aggregator" doc:id="64d3b1a3-bf3e-44c5-80f9-cf3f6a063cdf" size="20" >
<foreach doc:name="For Each" doc:id="1bea88df-b68a-4af9-9f7d-a0dbdf65d720" >
<db:stored-procedure doc:name="Insert into DGD_TCONTENT" doc:id="e942f30b-7f3c-4a1b-b175-2ad969ab39b9" config-ref="Database_Config">
<db:sql >{call InsertIntoContent (:CONTENT_ID, :CONTENT_TYPE, :CONTENT_EXT_ID, :CONTENT_TITLE, :CONTENT_SUMMARY, :CONTENT_URL, :CONTENT_FORMAT,
:CONTENT_OBSOLETE, :CONTENT_IMAGE_URL, :CONTENT_LANGUAGE, :CONTENT_DURATION, :CONTENT_DURATION_TYPE, :CONTENT_PROVIDER,
:CONTENT_INTERNAL, :CONTENT_CREATED_DATE, :CONTENT_MODIFIED_DATE)}</db:sql>
<db:input-parameters ><![CDATA[#[output application/java
---
{
CONTENT_ID : payload.content_id,
CONTENT_TYPE : payload.content_type,
CONTENT_EXT_ID : payload.content_ext_id,
CONTENT_TITLE : payload.content_title,
CONTENT_SUMMARY : payload.content_summary,
CONTENT_URL : payload.content_url,
CONTENT_FORMAT : payload.content_format,
CONTENT_OBSOLETE : payload.content_obsolete,
CONTENT_IMAGE_URL : payload.content_image_url,
CONTENT_LANGUAGE : payload.content_language,
CONTENT_DURATION : payload.content_duration,
CONTENT_DURATION_TYPE : payload.content_duration_type,
CONTENT_PROVIDER : payload.content_provider,
CONTENT_INTERNAL : payload.content_internal,
CONTENT_CREATED_DATE : payload.content_created_date,
CONTENT_MODIFIED_DATE : payload.content_modified_date,
}]]]></db:input-parameters>
</db:stored-procedure>
</foreach>
</batch:aggregator>
</batch:step>
</batch:process-records>
<batch:on-complete >
<logger level="INFO" doc:name="Logger" doc:id="42ae6570-e00d-4f0f-a2fa-3cd36ccc5a98" message="DGD_TCONTENT finished data migration."/>
</batch:on-complete>
</batch:job>
</flow>
<flow name="DOC_TFILEMigrationFlow" doc:id="40bf34a9-6b57-4640-9ebe-801f86da6331" >
<db:select doc:name="Select DOC_TFILE from Oracle DB" doc:id="c29cd80b-272f-4d92-97b6-9c1c8a214fb9" config-ref="MYCAREER_DEV_DB" >
<db:sql >SELECT * FROM DOC_TFILE</db:sql>
</db:select>
<batch:job jobName="MigrateDOC_TFILEBatchJob" doc:id="6cfb7fa7-eab5-4591-b188-10bd88f40efc" >
<batch:process-records >
<batch:step name="MigrateDOC_TFILEBatchStep" doc:id="82ab6f35-8867-46a7-9591-eb6f79ec64b4" >
<batch:aggregator doc:name="DOC_TFILE Batch Aggregator" doc:id="8c9277d0-65aa-4bba-9c28-3acf272f2936" size="20" >
<foreach doc:name="For Each" doc:id="9f50fb18-16b9-4be5-bf98-b7b024546ee8" >
<db:stored-procedure doc:name="Insert into DOC_TFILE" doc:id="6a7b3f09-284d-4bc5-91ea-945d02195395" config-ref="Database_Config">
<db:sql >{call InsertIntoFile (:FILE_CODE,:FILECONTENTTYPE_CODE,:FILE_NAME,:FILE_SIZE, :FILE_STATUS,:USER_CODE, :FILE_DATA,:FILE_STORAGE_VALUE,:FILE_UPLOAD_CODE,
:FILE_UPLOAD_TEMP,:FILESTORAGE_CODE,:FILE_DATE_INSERT,:FILE_DATE_UPDATE,:FILE_DATE_DELETE)}</db:sql>
<db:input-parameters ><![CDATA[#[output application/java
---
{
FILE_CODE : payload.file_code,
FILECONTENTTYPE_CODE : payload.filecontenttype_code,
FILE_NAME : payload.file_name,
FILE_SIZE : payload.file_size,
FILE_STATUS : payload.file_status,
USER_CODE : payload.user_code,
FILE_DATA : payload.file_data,
FILE_STORAGE_VALUE : payload.file_storage_value,
FILE_UPLOAD_CODE : payload.file_upload_code,
FILE_UPLOAD_TEMP : payload.file_upload_temp,
FILESTORAGE_CODE : payload.filestorage_code,
FILE_DATE_INSERT : payload.file_date_insert,
FILE_DATE_UPDATE : payload.file_date_upload,
CONTENT_CREATED_DATE : payload.content_created_date,
FILE_DATE_DELETE : payload.file_date_delete
}]]]></db:input-parameters>
</db:stored-procedure>
</foreach>
</batch:aggregator>
</batch:step>
</batch:process-records>
<batch:on-complete >
<logger level="INFO" doc:name="Logger" doc:id="c51d684a-f195-4758-84b8-355fc8cda3b5" message="DOC_TFILE finished data migration."/>
</batch:on-complete>
</batch:job>
</flow>
<flow name="DOC_TFILECONTENTTYPEMigrationFlow" doc:id="4447f39a-aabf-48df-ae81-9c23a9c17927" >
<db:select doc:name="Select DOC_TFILECONTENTTYPE from Oracle DB" doc:id="54a25f9f-d84f-4621-bc5b-b2578dcf4891" config-ref="MYCAREER_DEV_DB" >
<db:sql >SELECT * FROM DOC_TFILECONTENTTYPE</db:sql>
</db:select>
<batch:job jobName="MigrateDOC_TFILECONTENTTYPEBatchJob" doc:id="f895d255-e3f4-4139-8c9c-bdba1f292abf" >
<batch:process-records >
<batch:step name="MigrateDOC_TFILECONTENTTYPEBatchStep" doc:id="3ff4c3ff-659d-4efc-ad81-e3e3f42d0afb" >
<batch:aggregator doc:name="DOC_TFILECONTENTTYPE Batch Aggregator" doc:id="d7e56e72-172a-4322-acdd-a66a7e626224" size="20" >
<foreach doc:name="For Each" doc:id="d0e3b9ee-c820-4722-94cf-52ffabd14b04" >
<db:stored-procedure doc:name="Insert into DOC_TFILECONTENTTYPE" doc:id="0d873708-4611-42af-98c0-22f656bc5ec4" config-ref="Database_Config">
<db:sql >{call InsertIntoFileContentType (:CONTENTTYPE_CODE,:CONTENTTYPE_ID,:CONTENTTYPE_STATUS,:CONTENTTYPE_ICON,:CONTENTTYPE_NAME,
:CONTENTTYPE_DESCRIPTION)}</db:sql>
<db:input-parameters ><![CDATA[#[{
CONTENTTYPE_CODE : payload.contenttype_code,
CONTENTTYPE_ID : payload.contenttype_id,
CONTENTTYPE_STATUS : payload.contenttype_status,
CONTENTTYPE_ICON : payload.contenttype_icon,
CONTENTTYPE_NAME : payload.contenttype_name,
CONTENTTYPE_DESCRIPTION : payload.contenttype_description,
}]]]></db:input-parameters>
</db:stored-procedure>
</foreach>
</batch:aggregator>
</batch:step>
</batch:process-records>
<batch:on-complete >
<logger level="INFO" doc:name="Logger" doc:id="fd4797d9-bca0-4b97-bca4-b95dce09c2bb" message="DOC_TFILE_CONTENTTYPE finished data migration."/>
</batch:on-complete>
</batch:job>
</flow>
mainFlow and businesslogicFlow (which calls docLogicFlow):
<flow name="mainFlow" doc:id="7c0a5bef-b3d5-442f-bff3-10d038f69a5e">
<flow-ref doc:name="businesslogicFlow" doc:id="91360ede-4d71-44c7-9b64-8ee762e04ea0" name="businesslogicFlow" />
<error-handler>
<on-error-propagate enableNotifications="true" logException="true" doc:name="On Error Propagate" doc:id="488b507d-e26c-4c56-8759-8bb4f6645d71" type="ANY">
<flow-ref doc:name="errorHandlingFlow" doc:id="afdaf73c-0137-4d60-84f6-5c41234771a3" name="errorHandlingFlow" />
</on-error-propagate>
</error-handler>
</flow>
<flow name="businesslogicFlow" doc:id="5aa7011d-8abd-453d-9459-c7322838f14a" tracking:enable-default-events="true">
<db:select doc:name="Select" doc:id="58bc689c-b708-4b1b-b645-693735104a25" config-ref="MYCAREER_DEV_DB">
<db:sql >select 1 from dual</db:sql>
</db:select>
<batch:job jobName="template-db2db-account-migrationBatch_Job" doc:id="af55c5cf-807b-4582-9868-66f144b0a8e9">
<batch:process-records>
<batch:step name="Batch_Step" doc:id="428bb0a0-5082-451d-9253-2b6f0a147719" >
<flow-ref doc:name="Flow Reference" doc:id="ebf025cf-70b1-4145-8fd9-270d92c06420" name="docCleanFlow"/>
<logger level="INFO" doc:name="Logger" doc:id="92c2d1da-fb59-4cb3-b1e6-ac5e9ae28922" message="DELETE COMPLETED"/>
</batch:step>
</batch:process-records>
<batch:on-complete >
<flow-ref doc:name="Flow Reference" doc:id="9a331b54-55e4-4818-9050-cf70cc348581" name="docLogicFlow"/>
</batch:on-complete>
</batch:job>
</flow>
HTTP endpoint (triggerFlow) that calls mainFlow:
<flow name="triggerFlow" doc:id="25a15396-5def-4f1c-bac2-6c7a769f4278" >
<http:listener doc:name="/migrate" doc:id="ca1efe4f-1e53-428a-b439-c1d905246a34" config-ref="HTTP_Listener_config" path="/migrate"/>
<flow-ref doc:name="mainFlow" doc:id="9c665dd4-7df3-4e0d-a1eb-01ac63781ce7" name="mainFlow"/>
<ee:transform doc:name="Build response" doc:id="2abddd58-c707-435a-a004-ec5ba9107429">
<ee:message>
<ee:set-payload><![CDATA[%dw 2.0
output application/json
{
Message: "Batch Process initiated",
ID: payload.id,
RecordCount: payload.recordCount,
StartExecutionOn: payload.creationTime as DateTime
}
]]></ee:set-payload>
</ee:message>
</ee:transform>
</flow>
Does anybody know why this is happening? And how to avoid it?
Thank you!

It looks like the provided code is not the source of the duplication.
It is more likely something that is calling docLogicFlow.
What calls it? Is it triggered by a scheduler? Double-check that - a scheduler may start the flow again even while the previous run is still in progress. A scheduler is independent of flow execution; it depends only on its schedule. In general, many runs can end up executing in parallel from the same scheduler.
If it is not a scheduler, check the events that start this flow. My guess is that these events arrive too often, while your regular flow is still running. See Schedulers and Multiple Schedulers here: https://simpleflatservice.com/mule4/Multipleschedules.html
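If a scheduler does turn out to be the trigger, one common guard is to cap the flow's concurrency so a new tick cannot start while the previous run is still in progress. A minimal sketch (the wrapper flow, its name, and the one-hour frequency below are assumptions, not part of your config):
<!-- Hypothetical scheduler wrapper: maxConcurrency="1" keeps scheduled runs from overlapping -->
<flow name="scheduledMigrationFlow" maxConcurrency="1">
<scheduler doc:name="Scheduler">
<scheduling-strategy>
<fixed-frequency frequency="1" timeUnit="HOURS"/>
</scheduling-strategy>
</scheduler>
<flow-ref doc:name="mainFlow" name="mainFlow"/>
</flow>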
It is also good that you are using a database. You can use its transaction mechanism to prevent duplication. Always be prepared for something calling the flow again and again, and reject the duplicated requests. That is easy to do in a DB environment: just establish a lock and reject/ignore duplicated requests.
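For example, a minimal sketch of such a lock (the MIGRATION_LOCK table and its unique RUN_NAME column are assumptions, not part of your schema): acquire the lock with an insert whose unique-constraint violation fails a duplicate run, and release it when the migration finishes.
<!-- Hypothetical lock: a unique constraint on RUN_NAME makes a second concurrent run fail fast -->
<db:insert doc:name="Acquire migration lock" config-ref="Database_Config">
<db:sql>INSERT INTO MIGRATION_LOCK (RUN_NAME, STARTED_AT) VALUES ('doc-migration', CURRENT_TIMESTAMP)</db:sql>
</db:insert>
<!-- ... docCleanFlow / docLogicFlow here ... -->
<db:delete doc:name="Release migration lock" config-ref="Database_Config">
<db:sql>DELETE FROM MIGRATION_LOCK WHERE RUN_NAME = 'doc-migration'</db:sql>
</db:delete>
A run that hits the unique constraint can then be routed to an error handler that simply logs and exits instead of inserting duplicates.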
P.S. Your third flow reference points to a flow (DOC_TFILESTORAGEMigrationFlow) that is not defined anywhere in the code you posted. This one:
<flow-ref doc:name="DOC_TFILESTORAGE Migration Flow Reference" doc:id="0bda80b0-e330-4e86-b0f1-acfeb57e031a" name="DOC_TFILESTORAGEMigrationFlow" />

Related

Dynamic variable on Tsung XML config file

I've set up Tsung (v1.7) to test my application, but I'm facing a problem using a dynamic variable in my HTTP request. To be more precise, I need to retrieve some data from a CSV file and insert it into my request.
Reading the documentation, it seems that I don't really need to write any functions, since I'm using a version above 1.3; I just need to specify the file path in the 'option' tag and use 'setdynvars'. Unfortunately it doesn't seem to work (the web server response says that the content is empty). Any idea why?
<?xml version="1.0"?>
<!DOCTYPE tsung SYSTEM "/home/Desktop/tsung-1.7.0/tsung-1.0.dtd">
<tsung loglevel="warning">
<clients>
<client host="localhost" use_controller_vm="true"/>
</clients>
<servers>
<server host="127.0.0.1" port="8000" type="tcp"/>
</servers>
<load>
<arrivalphase phase="1" duration="2" unit="minute">
<users interarrival="45" unit="second"/>
</arrivalphase>
</load>
<options>
<option name="file_server" id="transactions" value="/home/Desktop/transactions.csv"/>
</options>
<sessions>
<session name="dummy" weight="1" type="ts_http">
<setdynvars sourcetype="file" fileid="transactions" delimiter=";" order="iter">
<var name="number_transaction"/>
</setdynvars>
<request>
<http url="...path..." method="GET" version="1.1"></http>
</request>
<request subst="true">
<http url='...path...' version='1.1' contents='transaction_id=%%_number_transaction%%' content_type='application/x-www-fomr-urlencoded' method='POST'></http>
</request>
</session>
</sessions>
</tsung>
After some attempts I've figured out that simply removing the content_type attribute from the request makes the whole configuration work!
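For reference, the working request would then look something like this (same placeholder path and variable as above, just without content_type; note that the content_type value in the original also misspells 'form' as 'fomr'):
<request subst="true">
<http url='...path...' version='1.1' contents='transaction_id=%%_number_transaction%%' method='POST'></http>
</request>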

Mule flowvars getting affected which preserves payload when the payload is changed

I am storing the output from the DB in a flow variable #[flowVars.test], after which I am iterating over the payload to remove a few key/value pairs. When the payload is modified inside the for-each loop, the value stored in the flow variable also gets modified. I am using the Mule 3.9 runtime.
<db:parameterized-query><![CDATA[select MachineName,TransactionId,SourceAPIName,Source,Target,ErrorCode,Severity,MailContent,ExceptionType,ExceptionMessage from Notification
where Notify='Y' and IsNotify='N']]></db:parameterized-query>
</db:select>
</poll>
<expression-filter expression="#[payload.size()>0]" doc:name="Stop If No Records"/>
<set-variable variableName="test" value="#[message.payload]" doc:name="Variable"/>
<set-variable variableName="validatedEntries" value="#[[]]"
doc:name="ValidatedEntries" />
<logger level="INFO" doc:name="Logger"/>
<foreach collection="#[flowVars.test]" doc:name="For Each">
<logger level="INFO" doc:name="Logger"/>
<set-variable variableName="tempNotificationTable" value="#[payload.remove('TransactionID')]" doc:name="Temp NotificationTable"/>
<expression-component doc:name="Expression"><![CDATA[#[flowVars.validatedEntries.add(payload)]]]></expression-component>
</foreach>
For Each does not create a deep copy of the objects it iterates over. The maps inside flowVars.test and inside the payload are the same instances, so removing a key inside the loop also changes what you "preserved" in the flow variable.
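One way around this (a sketch against your snippet, not a tested flow) is to mutate a copy of each record, so the original map held in flowVars.test stays untouched:
<foreach collection="#[flowVars.test]" doc:name="For Each">
<!-- Hypothetical fix: work on a copy of the current record instead of the original map.
A plain HashMap is case-sensitive, unlike the connector's CaseInsensitiveHashMap, so use the exact column name. -->
<set-payload value="#[new java.util.HashMap(payload)]" doc:name="Copy Record"/>
<expression-component doc:name="Remove Key"><![CDATA[#[payload.remove('TransactionId')]]]></expression-component>
<expression-component doc:name="Collect"><![CDATA[#[flowVars.validatedEntries.add(payload)]]]></expression-component>
</foreach>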

Specify separate data to each user in Tsung

I am using Tsung for load testing. Here is the config file for Tsung.
<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE tsung SYSTEM "/usr/share/tsung/tsung-1.0.dtd" []>
<tsung loglevel="warning">
<clients>
<client host="t1" cpu="2" maxusers="30000000"/>
<client host="t2" cpu="2" maxusers="30000000"/>
</clients>
<servers>
<server host="localhost" port="9200" type="tcp"/>
</servers>
<load>
<arrivalphase phase="1" duration="1" unit="minute">
<users arrivalrate="5" unit="second"/>
</arrivalphase>
</load>
</tsung>
But, I want the following:
Only one user per client every time
Specific data to be read from file for each user. As in, I want to read data from a user1.json for user1 (on client 1) and from user2.json for user2 (on client2).
Is this possible in Tsung? I went through the docs, but didn't find any option to do so. Can someone help me out with this?
Not exactly what you're asking for, but something similar is possible with one input file.
<options>
<option name="file_server" id="inputUsers" value="/tmp/users.txt"/>
</options>
<sessions>
<session probability="100" name="test" type="ts_http" >
<setdynvars sourcetype="file" fileid="inputUsers" delimiter=";" order="iter">
<var name="userId"/>
<var name="deviceMac"/>
<var name="tKey"/>
</setdynvars>
<request subst="true">
<http url="/abc/%%_userId%%/%%_deviceMac%%?arg=%%_tKey%%" version="1.1"></http>
</request>
<request subst="true">
<http url="/123/%%_userId%%" version="1.1"></http>
</request>
</session>
</sessions>
Where /tmp/users.txt contains semicolon-separated, user-specific values - something like this (userId;deviceMac;tKey):
97099;05d4e99de98a;4xrwgyyze54kefnwsd74kj4ghvn5f1
Since the setdynvars order value is "iter", it will iterate through the file line by line and use that input data as request parameters.
In the above example case, it would make these two requests:
/abc/97099/05d4e99de98a?arg=4xrwgyyze54kefnwsd74kj4ghvn5f1
/123/97099
You can achieve a "user-specific" load-test scenario this way.

How to use Stored Procedure with Mule 3.5 Batch Processing

I am using the Mule 3.5 Anypoint connector and have moved from a select query to a stored procedure in the batch processing scope component. With that change, Mule does not like the object type that the stored procedure returns.
Here is the error that I get back:
ERROR 2014-06-26 15:15:00,426 [pool-15-thread-1] org.mule.exception.DefaultMessagingExceptionStrategy:
********************************************************************************
Message : Object "java.util.HashMap" not of correct type. It must be of type "{interface java.lang.Iterable,interface java.util.Iterator,interface org.mule.routing.MessageSequence,interface java.util.Collection}" (java.lang.IllegalArgumentException)
Code : MULE_ERROR--2
--------------------------------------------------------------------------------
The type of object returned from the database connector using a stored procedure is as such:
java.util.HashMap
With the Select statement (this works) the type is as such:
org.mule.util.CaseInsensitiveHashMap
Like stated above with the select statement this does work.
Some extra information about the system:
It is SQL Server 2008 R2
The database connector works fine with the stored procedure but errors when it reaches the process records section
<batch:job name="ons-esb-mainBatch1">
<batch:threading-profile poolExhaustedAction="WAIT"/>
<batch:input>
<poll doc:name="Poll">
<fixed-frequency-scheduler frequency="15" timeUnit="SECONDS"/>
<db:stored-procedure config-ref="Generic_Database_Configuration" doc:name="Database">
<db:parameterized-query><![CDATA[{ CALL otis.GetEntityQueueByTime() }]]></db:parameterized-query>
</db:stored-procedure>
</poll>
<logger level="INFO" doc:name="Logger"/>
</batch:input>
<batch:process-records>
<batch:step name="Batch_Step">
<choice doc:name="Choice">
<!-- Choice Selector Logic (taken out to save space) -->
</choice>
</batch:step>
</batch:process-records>
<batch:on-complete>
<logger message="EntityQueues Completed Queueing into ActiveMQ" level="INFO" doc:name="Logger"/>
</batch:on-complete>
</batch:job>
Summary
I would like to find a way for the object returned by the stored procedure to be processed through the batch job the same way a select statement's result is.
A java.util.HashMap is not iterable. Try replacing the message payload with its entrySet():
<batch:input>
<poll doc:name="Poll">
<fixed-frequency-scheduler frequency="15" timeUnit="SECONDS"/>
<db:stored-procedure config-ref="Generic_Database_Configuration" doc:name="Database">
<db:parameterized-query><![CDATA[{ CALL otis.GetEntityQueueByTime() }]]></db:parameterized-query>
</db:stored-procedure>
</poll>
<set-payload value="#[message.payload.entrySet()]" />
<logger level="INFO" doc:name="Logger"/>
</batch:input>

Iterating an array using Mule's foreach scope

I would like to iterate through an array and use each value taken from the array within an http inbound endpoint. How can I iterate through the array and use each value as a variable in that endpoint?
The code that I used to try was:
<flow name="foreachFlow1" doc:name="foreachFlow1">
<poll frequency="2000">
<foreach collection="#[groovy:['localhost:8082', 'localhost:8083']]"
doc:name="For Each">
<http:outbound-endpoint exchange-pattern="request-response"
address="http://#[payload]" method="GET" doc:name="HTTP" />
</foreach>
</poll>
</flow>
and I get the error
Invalid content was found starting with element 'poll'
Inbound endpoints are message sources and cannot be parametrized the way you're describing.
To achieve your goal, try using a <poll> message source to wrap a foreach that uses http:outbound-endpoint to perform GET (method) request-response (exchange-pattern) interactions.
The trick is to bring the results of the HTTP calls back out of the foreach, which by default does not do this. The following illustrates a potential approach:
<flow name="foreachFlow1">
<poll frequency="2000">
<processor-chain>
<set-variable variableName="httpResponses" value="#[[]]" />
<foreach collection="#[groovy:['localhost:8082', 'localhost:8083']]">
<http:outbound-endpoint
exchange-pattern="request-response" address="http://#[payload]"
method="GET" />
<expression-component>httpResponses.add(message.payloadAs(java.lang.String))
</expression-component>
</foreach>
</processor-chain>
</poll>
<logger level="INFO" message="#[httpResponses]" />
</flow>
<!-- Test server stubs -->
<flow name="server8082">
<http:inbound-endpoint exchange-pattern="request-response"
address="http://localhost:8082" />
<set-payload value="This is 8082" />
</flow>
<flow name="server8083">
<http:inbound-endpoint exchange-pattern="request-response"
address="http://localhost:8083" />
<set-payload value="This is 8083" />
</flow>
