I am working with Amazon Kinesis Data Streams. My Kinesis stream consists of only one shard.
I am trying to read data (records) from the stream after writing some data (records) to the same stream. My records are simple JSONs.
I can see both the writes and the reads in the Amazon console.
When I try to print the content of a record with record.getData(), I only get the ByteBuffer's toString() output, followed by this error:
java.nio.HeapByteBuffer[pos=4 lim=4 cap=4]
20:35:59.118 [RecordProcessor-0000] WARN com.kinesisconsumer.AmazonKinesisApplicationSampleRecordProcessor - Caught throwable while processing record UserRecord [subSequenceNumber=0, explicitHashKey=null, aggregated=false, getSequenceNumber()=49593662497507120518174908605360552573875197411355262978, getData()=java.nio.HeapByteBuffer[pos=4 lim=4 cap=4], getPartitionKey()=12345]
java.lang.StringIndexOutOfBoundsException: String index out of range: -9
at java.lang.String.substring(String.java:1931)
at com.kinesisconsumer.AmazonKinesisApplicationSampleRecordProcessor.processSingleRecord(AmazonKinesisApplicationSampleRecordProcessor.java:112)
at com.kinesisconsumer.AmazonKinesisApplicationSampleRecordProcessor.processRecordsWithRetries(AmazonKinesisApplicationSampleRecordProcessor.java:75)
at com.kinesisconsumer.AmazonKinesisApplicationSampleRecordProcessor.processRecords(AmazonKinesisApplicationSampleRecordProcessor.java:53)
at com.amazonaws.services.kinesis.clientlibrary.lib.worker.V1ToV2RecordProcessorAdapter.processRecords(V1ToV2RecordProcessorAdapter.java:42)
at com.amazonaws.services.kinesis.clientlibrary.lib.worker.ProcessTask.callProcessRecords(ProcessTask.java:221)
at com.amazonaws.services.kinesis.clientlibrary.lib.worker.ProcessTask.call(ProcessTask.java:176)
at com.amazonaws.services.kinesis.clientlibrary.lib.worker.MetricsCollectingTaskDecorator.call(MetricsCollectingTaskDecorator.java:49)
at com.amazonaws.services.kinesis.clientlibrary.lib.worker.MetricsCollectingTaskDecorator.call(MetricsCollectingTaskDecorator.java:24)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
Here is my code:
public class AmazonKinesisApplicationRecordProcessorFactory implements IRecordProcessorFactory {
/**
* {@inheritDoc}
*/
@Override
public IRecordProcessor createProcessor() {
return new AmazonKinesisApplicationSampleRecordProcessor();
}
}
public final class AmazonKinesisApplicationSample {
public static final String SAMPLE_APPLICATION_STREAM_NAME = "LimorKinesis";
private static final String SAMPLE_APPLICATION_NAME = "SampleKinesisApplication";
// Initial position in the stream when the application starts up for the first time.
// Position can be one of LATEST (most recent data) or TRIM_HORIZON (oldest available data)
private static final InitialPositionInStream SAMPLE_APPLICATION_INITIAL_POSITION_IN_STREAM =
InitialPositionInStream.LATEST;
private static ProfileCredentialsProvider credentialsProvider;
private static void init() {
// Ensure the JVM will refresh the cached IP values of AWS resources (e.g. service endpoints).
java.security.Security.setProperty("networkaddress.cache.ttl", "60");
/*
* The ProfileCredentialsProvider will return your [default]
* credential profile by reading from the credentials file located at
* (~/.aws/credentials).
*/
credentialsProvider = new ProfileCredentialsProvider();
try {
credentialsProvider.getCredentials();
} catch (Exception e) {
throw new AmazonClientException("Cannot load the credentials from the credential profiles file. "
+ "Please make sure that your credentials file is at the correct "
+ "location (~/.aws/credentials), and is in valid format.", e);
}
}
public static void main(String[] args) throws Exception {
init();
if (args.length == 1 && "delete-resources".equals(args[0])) {
deleteResources();
return;
}
String workerId = InetAddress.getLocalHost().getCanonicalHostName() + ":" + UUID.randomUUID();
KinesisClientLibConfiguration kinesisClientLibConfiguration =
new KinesisClientLibConfiguration(SAMPLE_APPLICATION_NAME,
SAMPLE_APPLICATION_STREAM_NAME,
credentialsProvider,
workerId);
kinesisClientLibConfiguration.withInitialPositionInStream(SAMPLE_APPLICATION_INITIAL_POSITION_IN_STREAM);
kinesisClientLibConfiguration.withRegionName("us-west-2");//todo : added region west-2
IRecordProcessorFactory recordProcessorFactory = new AmazonKinesisApplicationRecordProcessorFactory();
Worker worker = new Worker(recordProcessorFactory, kinesisClientLibConfiguration);
System.out.printf("Running %s to process stream %s as worker %s...\n",
SAMPLE_APPLICATION_NAME,
SAMPLE_APPLICATION_STREAM_NAME,
workerId);
int exitCode = 0;
try {
worker.run();
} catch (Throwable t) {
System.err.println("Caught throwable while processing data.");
t.printStackTrace();
exitCode = 1;
}
System.exit(exitCode);
}
public static void deleteResources() {
// Delete the stream
AmazonKinesis kinesis = AmazonKinesisClientBuilder.standard()
.withCredentials(credentialsProvider)
.withRegion("us-west-2")
.build();
System.out.printf("Deleting the Amazon Kinesis stream used by the sample. Stream Name = %s.\n",
SAMPLE_APPLICATION_STREAM_NAME);
try {
kinesis.deleteStream(SAMPLE_APPLICATION_STREAM_NAME);
} catch (ResourceNotFoundException ex) {
// The stream doesn't exist.
}
// Delete the table
AmazonDynamoDB dynamoDB = AmazonDynamoDBClientBuilder.standard()
.withCredentials(credentialsProvider)
.withRegion("us-west-2")
.build();
System.out.printf("Deleting the Amazon DynamoDB table used by the Amazon Kinesis Client Library. Table Name = %s.\n",
SAMPLE_APPLICATION_NAME);
try {
dynamoDB.deleteTable(SAMPLE_APPLICATION_NAME);
} catch (com.amazonaws.services.dynamodbv2.model.ResourceNotFoundException ex) {
// The table doesn't exist.
}
}
}
public class AmazonKinesisApplicationSampleRecordProcessor implements IRecordProcessor {
private static final Log LOG = LogFactory.getLog(AmazonKinesisApplicationSampleRecordProcessor.class);
private String kinesisShardId;
// Backoff and retry settings
private static final long BACKOFF_TIME_IN_MILLIS = 3000L;
private static final int NUM_RETRIES = 10;
// Checkpoint about once a minute
private static final long CHECKPOINT_INTERVAL_MILLIS = 60000L;
private long nextCheckpointTimeInMillis;
private final CharsetDecoder decoder = Charset.forName("UTF-8").newDecoder();
/**
* {@inheritDoc}
*/
@Override
public void initialize(String shardId) {
LOG.info("Initializing record processor for shard: " + shardId);
this.kinesisShardId = shardId;
}
/**
* {@inheritDoc}
*/
@Override
public void processRecords(List<Record> records, IRecordProcessorCheckpointer checkpointer) {
LOG.info("Processing " + records.size() + " records from " + kinesisShardId);
// Process records and perform all exception handling.
processRecordsWithRetries(records);
// Checkpoint once every checkpoint interval.
if (System.currentTimeMillis() > nextCheckpointTimeInMillis) {
checkpoint(checkpointer);
nextCheckpointTimeInMillis = System.currentTimeMillis() + CHECKPOINT_INTERVAL_MILLIS;
}
}
/**
* Process records performing retries as needed. Skip "poison pill" records.
*
* @param records Data records to be processed.
*/
private void processRecordsWithRetries(List<Record> records) {
for (Record record : records) {
boolean processedSuccessfully = false;
for (int i = 0; i < NUM_RETRIES; i++) {
try {
//
// Logic to process record goes here.
//
processSingleRecord(record);
processedSuccessfully = true;
break;
} catch (Throwable t) {
LOG.warn("Caught throwable while processing record " + record, t);
}
// backoff if we encounter an exception.
try {
Thread.sleep(BACKOFF_TIME_IN_MILLIS);
} catch (InterruptedException e) {
LOG.debug("Interrupted sleep", e);
}
}
if (!processedSuccessfully) {
LOG.error("Couldn't process record " + record + ". Skipping the record.");
}
}
}
/**
* Process a single record.
*
* @param record The record to be processed.
*/
private void processSingleRecord(Record record) {
System.out.println(record.getData());
String data = null;
try {
// For this app, we interpret the payload as UTF-8 chars.
data = decoder.decode(record.getData()).toString();
// Assume this record came from AmazonKinesisSample and log its age.
long recordCreateTime = new Long(data.substring("testData-".length()));
long ageOfRecordInMillis = System.currentTimeMillis() - recordCreateTime;
LOG.info(record.getSequenceNumber() + ", " + record.getPartitionKey() + ", " + data + ", Created "
+ ageOfRecordInMillis + " milliseconds ago.");
} catch (NumberFormatException e) {
LOG.info("Record does not match sample record format. Ignoring record with data; " + data);
} catch (CharacterCodingException e) {
LOG.error("Malformed data: " + data, e);
}
}
/**
* {@inheritDoc}
*/
@Override
public void shutdown(IRecordProcessorCheckpointer checkpointer, ShutdownReason reason) {
LOG.info("Shutting down record processor for shard: " + kinesisShardId);
// Important to checkpoint after reaching end of shard, so we can start processing data from child shards.
if (reason == ShutdownReason.TERMINATE) {
checkpoint(checkpointer);
}
}
/** Checkpoint with retries.
* @param checkpointer
*/
private void checkpoint(IRecordProcessorCheckpointer checkpointer) {
LOG.info("Checkpointing shard " + kinesisShardId);
for (int i = 0; i < NUM_RETRIES; i++) {
try {
checkpointer.checkpoint();
break;
} catch (ShutdownException se) {
// Ignore checkpoint if the processor instance has been shutdown (fail over).
LOG.info("Caught shutdown exception, skipping checkpoint.", se);
break;
} catch (ThrottlingException e) {
// Backoff and re-attempt checkpoint upon transient failures
if (i >= (NUM_RETRIES - 1)) {
LOG.error("Checkpoint failed after " + (i + 1) + "attempts.", e);
break;
} else {
LOG.info("Transient issue when checkpointing - attempt " + (i + 1) + " of "
+ NUM_RETRIES, e);
}
} catch (InvalidStateException e) {
// This indicates an issue with the DynamoDB table (check for table, provisioned IOPS).
LOG.error("Cannot save checkpoint to the DynamoDB table used by the Amazon Kinesis Client Library.", e);
break;
}
try {
Thread.sleep(BACKOFF_TIME_IN_MILLIS);
} catch (InterruptedException e) {
LOG.debug("Interrupted sleep", e);
}
}
}
}
public class AmazonKinesisRecordProducerSample {
private static AmazonKinesis kinesis;
private static void init() throws Exception {
/*
* The ProfileCredentialsProvider will return your [default]
* credential profile by reading from the credentials file located at
* (~/.aws/credentials).
*/
ProfileCredentialsProvider credentialsProvider = new ProfileCredentialsProvider();
try {
credentialsProvider.getCredentials();
} catch (Exception e) {
throw new AmazonClientException(
"Cannot load the credentials from the credential profiles file. " +
"Please make sure that your credentials file is at the correct " +
"location (~/.aws/credentials), and is in valid format.",
e);
}
kinesis = AmazonKinesisClientBuilder.standard()
.withCredentials(credentialsProvider)
.withRegion("us-west-2")
.build();
}
public static void main(String[] args) throws Exception {
init();
final String myStreamName = AmazonKinesisApplicationSample.SAMPLE_APPLICATION_STREAM_NAME;
final Integer myStreamSize = 1;
// Describe the stream and check if it exists.
DescribeStreamRequest describeStreamRequest = new DescribeStreamRequest().withStreamName(myStreamName);
try {
StreamDescription streamDescription = kinesis.describeStream(describeStreamRequest).getStreamDescription();
System.out.printf("Stream %s has a status of %s.\n", myStreamName, streamDescription.getStreamStatus());
if ("DELETING".equals(streamDescription.getStreamStatus())) {
System.out.println("Stream is being deleted. This sample will now exit.");
System.exit(0);
}
// Wait for the stream to become active if it is not yet ACTIVE.
if (!"ACTIVE".equals(streamDescription.getStreamStatus())) {
waitForStreamToBecomeAvailable(myStreamName);
}
} catch (ResourceNotFoundException ex) {
System.out.printf("Stream %s does not exist. Creating it now.\n", myStreamName);
// Create a stream. The number of shards determines the provisioned throughput.
CreateStreamRequest createStreamRequest = new CreateStreamRequest();
createStreamRequest.setStreamName(myStreamName);
createStreamRequest.setShardCount(myStreamSize);
kinesis.createStream(createStreamRequest);
// The stream is now being created. Wait for it to become active.
waitForStreamToBecomeAvailable(myStreamName);
}
// List all of my streams.
ListStreamsRequest listStreamsRequest = new ListStreamsRequest();
listStreamsRequest.setLimit(10);
ListStreamsResult listStreamsResult = kinesis.listStreams(listStreamsRequest);
List<String> streamNames = listStreamsResult.getStreamNames();
while (listStreamsResult.isHasMoreStreams()) {
if (streamNames.size() > 0) {
listStreamsRequest.setExclusiveStartStreamName(streamNames.get(streamNames.size() - 1));
}
listStreamsResult = kinesis.listStreams(listStreamsRequest);
streamNames.addAll(listStreamsResult.getStreamNames());
}
// Print all of my streams.
System.out.println("List of my streams: ");
for (int i = 0; i < streamNames.size(); i++) {
System.out.println("\t- " + streamNames.get(i));
}
System.out.printf("Putting records in stream : %s until this application is stopped...\n", myStreamName);
System.out.println("Press CTRL-C to stop.");
// Write records to the stream until this program is aborted.
while (true) {
long createTime = System.currentTimeMillis();
PutRecordRequest putRecordRequest = new PutRecordRequest();
putRecordRequest.setStreamName(myStreamName);
putRecordRequest.setData(ByteBuffer.wrap(String.format("testData-%d", createTime).getBytes()));
putRecordRequest.setPartitionKey(String.format("partitionKey-%d", createTime));
PutRecordResult putRecordResult = kinesis.putRecord(putRecordRequest);
System.out.printf("Successfully put record, partition key : %s, ShardID : %s, SequenceNumber : %s.\n",
putRecordRequest.getPartitionKey(),
putRecordResult.getShardId(),
putRecordResult.getSequenceNumber());
}
}
private static void waitForStreamToBecomeAvailable(String myStreamName) throws InterruptedException {
System.out.printf("Waiting for %s to become ACTIVE...\n", myStreamName);
long startTime = System.currentTimeMillis();
long endTime = startTime + TimeUnit.MINUTES.toMillis(10);
while (System.currentTimeMillis() < endTime) {
Thread.sleep(TimeUnit.SECONDS.toMillis(20));
try {
DescribeStreamRequest describeStreamRequest = new DescribeStreamRequest();
describeStreamRequest.setStreamName(myStreamName);
// ask for no more than 10 shards at a time -- this is an optional parameter
describeStreamRequest.setLimit(10);
DescribeStreamResult describeStreamResponse = kinesis.describeStream(describeStreamRequest);
String streamStatus = describeStreamResponse.getStreamDescription().getStreamStatus();
System.out.printf("\t- current state: %s\n", streamStatus);
if ("ACTIVE".equals(streamStatus)) {
return;
}
} catch (ResourceNotFoundException ex) {
// ResourceNotFound means the stream doesn't exist yet,
// so ignore this error and just keep polling.
} catch (AmazonServiceException ase) {
throw ase;
}
}
throw new RuntimeException(String.format("Stream %s never became active", myStreamName));
}
}
I used the sample code from this link:
https://github.com/aws/aws-sdk-java/tree/master/src/samples/AmazonKinesis
Try changing the application name and then retry. Most problems get resolved by this simple change.
Or try the code below.
AmazonKinesisApplicationSample.java :-
package KinesiSampleApplication.www.intellyzen.com;
/*
* Copyright 2012-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
import java.net.InetAddress;
import java.util.UUID;
import com.amazonaws.AmazonClientException;
import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.auth.profile.ProfileCredentialsProvider;
import com.amazonaws.services.dynamodbv2.AmazonDynamoDB;
import com.amazonaws.services.dynamodbv2.AmazonDynamoDBClientBuilder;
import com.amazonaws.services.kinesis.AmazonKinesis;
import com.amazonaws.services.kinesis.AmazonKinesisClientBuilder;
import com.amazonaws.services.kinesis.clientlibrary.interfaces.IRecordProcessorFactory;
import com.amazonaws.services.kinesis.clientlibrary.lib.worker.InitialPositionInStream;
import com.amazonaws.services.kinesis.clientlibrary.lib.worker.KinesisClientLibConfiguration;
import com.amazonaws.services.kinesis.clientlibrary.lib.worker.Worker;
import com.amazonaws.services.kinesis.model.ResourceNotFoundException;
/**
* Sample Amazon Kinesis Application.
*/
public final class AmazonKinesisApplicationSample {
/*
* Before running the code:
* Fill in your AWS access credentials in the provided credentials
* file template, and be sure to move the file to the default location
* (~/.aws/credentials) where the sample code will load the
* credentials from.
* https://console.aws.amazon.com/iam/home?#security_credential
*
* WARNING:
* To avoid accidental leakage of your credentials, DO NOT keep
* the credentials file in your source directory.
*/
public static final String SAMPLE_APPLICATION_STREAM_NAME = "IOTREST-API";
private static final String SAMPLE_APPLICATION_NAME = "SampleKinesisApplicationadsfdsa11 ";
// Initial position in the stream when the application starts up for the first time.
// Position can be one of LATEST (most recent data) or TRIM_HORIZON (oldest available data)
private static final InitialPositionInStream SAMPLE_APPLICATION_INITIAL_POSITION_IN_STREAM =
InitialPositionInStream.LATEST;
private static ProfileCredentialsProvider credentialsProvider;
private static void init() {
// Ensure the JVM will refresh the cached IP values of AWS resources (e.g. service endpoints).
java.security.Security.setProperty("networkaddress.cache.ttl", "60");
/*
* The ProfileCredentialsProvider will return your [default]
* credential profile by reading from the credentials file located at
* (~/.aws/credentials).
*/
credentialsProvider = new ProfileCredentialsProvider();
try {
credentialsProvider.getCredentials();
} catch (Exception e) {
throw new AmazonClientException("Cannot load the credentials from the credential profiles file. "
+ "Please make sure that your credentials file is at the correct "
+ "location (~/.aws/credentials), and is in valid format.", e);
}
}
public static void main(String[] args) throws Exception {
init();
if (args.length == 1 && "delete-resources".equals(args[0])) {
deleteResources();
return;
}
String workerId = InetAddress.getLocalHost().getCanonicalHostName() + ":" + UUID.randomUUID();
KinesisClientLibConfiguration kinesisClientLibConfiguration =
new KinesisClientLibConfiguration(SAMPLE_APPLICATION_NAME,
SAMPLE_APPLICATION_STREAM_NAME,
credentialsProvider,
workerId);
kinesisClientLibConfiguration.withInitialPositionInStream(SAMPLE_APPLICATION_INITIAL_POSITION_IN_STREAM);
IRecordProcessorFactory recordProcessorFactory = new AmazonKinesisApplicationRecordProcessorFactory();
Worker worker = new Worker(recordProcessorFactory, kinesisClientLibConfiguration);
System.out.printf("Running %s to process stream %s as worker %s...\n",
SAMPLE_APPLICATION_NAME,
SAMPLE_APPLICATION_STREAM_NAME,
workerId);
int exitCode = 0;
try {
worker.run();
} catch (Throwable t) {
System.err.println("Caught throwable while processing data.");
t.printStackTrace();
exitCode = 1;
}
System.exit(exitCode);
}
public static void deleteResources() {
// Delete the stream
AmazonKinesis kinesis = AmazonKinesisClientBuilder.standard()
.withCredentials(credentialsProvider)
.withRegion("us-east-1")
.build();
System.out.printf("Deleting the Amazon Kinesis stream used by the sample. Stream Name = %s.\n",
SAMPLE_APPLICATION_STREAM_NAME);
try {
kinesis.deleteStream(SAMPLE_APPLICATION_STREAM_NAME);
} catch (ResourceNotFoundException ex) {
// The stream doesn't exist.
}
// Delete the table
AmazonDynamoDB dynamoDB = AmazonDynamoDBClientBuilder.standard()
.withCredentials(credentialsProvider)
.withRegion("us-east-1")
.build();
System.out.printf("Deleting the Amazon DynamoDB table used by the Amazon Kinesis Client Library. Table Name = %s.\n",
SAMPLE_APPLICATION_NAME);
try {
dynamoDB.deleteTable(SAMPLE_APPLICATION_NAME);
} catch (com.amazonaws.services.dynamodbv2.model.ResourceNotFoundException ex) {
// The table doesn't exist.
}
}
}
AmazonKinesisApplicationSampleRecordProcessor.java:-
package KinesiSampleApplication.www.intellyzen.com;
/*
* Copyright 2012-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
import java.nio.charset.CharacterCodingException;
import java.nio.charset.Charset;
import java.nio.charset.CharsetDecoder;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.amazonaws.services.kinesis.clientlibrary.exceptions.InvalidStateException;
import com.amazonaws.services.kinesis.clientlibrary.exceptions.ShutdownException;
import com.amazonaws.services.kinesis.clientlibrary.exceptions.ThrottlingException;
import com.amazonaws.services.kinesis.clientlibrary.interfaces.IRecordProcessor;
import com.amazonaws.services.kinesis.clientlibrary.interfaces.IRecordProcessorCheckpointer;
import com.amazonaws.services.kinesis.model.Record;
import software.amazon.kinesis.lifecycle.ShutdownReason;
/**
* Processes records and checkpoints progress.
*/
public class AmazonKinesisApplicationSampleRecordProcessor implements IRecordProcessor {
private static final Log LOG = LogFactory.getLog(AmazonKinesisApplicationSampleRecordProcessor.class);
private String kinesisShardId;
// Backoff and retry settings
private static final long BACKOFF_TIME_IN_MILLIS = 3000L;
private static final int NUM_RETRIES = 10;
// Checkpoint about once a minute
private static final long CHECKPOINT_INTERVAL_MILLIS = 60000L;
private long nextCheckpointTimeInMillis;
private final CharsetDecoder decoder = Charset.forName("UTF-8").newDecoder();
/**
* {@inheritDoc}
*/
public void initialize(String shardId) {
LOG.info("Initializing record processor for shard: " + shardId);
this.kinesisShardId = shardId;
}
/**
* {@inheritDoc}
*/
public void processRecords(List<Record> records, IRecordProcessorCheckpointer checkpointer) {
LOG.info("Processing " + records.size() + " records from " + kinesisShardId);
// Process records and perform all exception handling.
processRecordsWithRetries(records);
// Checkpoint once every checkpoint interval.
if (System.currentTimeMillis() > nextCheckpointTimeInMillis) {
checkpoint(checkpointer);
nextCheckpointTimeInMillis = System.currentTimeMillis() + CHECKPOINT_INTERVAL_MILLIS;
}
}
/**
* Process records performing retries as needed. Skip "poison pill" records.
*
* @param records Data records to be processed.
*/
private void processRecordsWithRetries(List<Record> records) {
for (Record record : records) {
boolean processedSuccessfully = false;
for (int i = 0; i < NUM_RETRIES; i++) {
try {
//
// Logic to process record goes here.
//
processSingleRecord(record);
processedSuccessfully = true;
break;
} catch (Throwable t) {
LOG.warn("Caught throwable while processing record " + record, t);
}
// backoff if we encounter an exception.
try {
Thread.sleep(BACKOFF_TIME_IN_MILLIS);
} catch (InterruptedException e) {
LOG.debug("Interrupted sleep", e);
}
}
if (!processedSuccessfully) {
LOG.error("Couldn't process record " + record + ". Skipping the record.");
}
}
}
/**
* Process a single record.
*
* @param record The record to be processed.
*/
private void processSingleRecord(Record record) {
// TODO Add your own record processing logic here
String data = null;
try {
// For this app, we interpret the payload as UTF-8 chars.
data = decoder.decode(record.getData()).toString();
System.out.println(data);
System.out.println("\n");
// Assume this record came from AmazonKinesisSample and log its age.
long recordCreateTime = new Long(data.substring("testData-".length()));
long ageOfRecordInMillis = System.currentTimeMillis() - recordCreateTime;
LOG.info(record.getSequenceNumber() + ", " + record.getPartitionKey() + ", " + data + ", Created "
+ ageOfRecordInMillis + " milliseconds ago.");
} catch (NumberFormatException e) {
LOG.info("Record does not match sample record format. Ignoring record with data; " + data);}
catch (CharacterCodingException e) {
LOG.error("Malformed data: " + data, e);
}
}
/**
* {@inheritDoc}
*/
public void shutdown(IRecordProcessorCheckpointer checkpointer, ShutdownReason reason) {
LOG.info("Shutting down record processor for shard: " + kinesisShardId);
// Important to checkpoint after reaching end of shard, so we can start processing data from child shards.
if (reason == ShutdownReason.SHARD_END) {
checkpoint(checkpointer);
}
}
/** Checkpoint with retries.
* @param checkpointer
*/
private void checkpoint(IRecordProcessorCheckpointer checkpointer) {
LOG.info("Checkpointing shard " + kinesisShardId);
for (int i = 0; i < NUM_RETRIES; i++) {
try {
checkpointer.checkpoint();
break;
} catch (ShutdownException se) {
// Ignore checkpoint if the processor instance has been shutdown (fail over).
LOG.info("Caught shutdown exception, skipping checkpoint.", se);
break;
} catch (ThrottlingException e) {
// Backoff and re-attempt checkpoint upon transient failures
if (i >= (NUM_RETRIES - 1)) {
LOG.error("Checkpoint failed after " + (i + 1) + "attempts.", e);
break;
} else {
LOG.info("Transient issue when checkpointing - attempt " + (i + 1) + " of "
+ NUM_RETRIES, e);
}
} catch (InvalidStateException e) {
// This indicates an issue with the DynamoDB table (check for table, provisioned IOPS).
LOG.error("Cannot save checkpoint to the DynamoDB table used by the Amazon Kinesis Client Library.", e);
break;
}
try {
Thread.sleep(BACKOFF_TIME_IN_MILLIS);
} catch (InterruptedException e) {
LOG.debug("Interrupted sleep", e);
}
}
}
public void shutdown(IRecordProcessorCheckpointer checkpointer,
com.amazonaws.services.kinesis.clientlibrary.lib.worker.ShutdownReason reason) {
// TODO Auto-generated method stub
}
}
AmazonKinesisApplicationRecordProcessorFactory.java:-
package KinesiSampleApplication.www.intellyzen.com;
/*
* Copyright 2012-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
import com.amazonaws.services.kinesis.clientlibrary.interfaces.IRecordProcessor;
import com.amazonaws.services.kinesis.clientlibrary.interfaces.IRecordProcessorFactory;
/**
* Used to create new record processors.
*/
public class AmazonKinesisApplicationRecordProcessorFactory implements IRecordProcessorFactory {
/**
* {@inheritDoc}
*/
public IRecordProcessor createProcessor() {
return new AmazonKinesisApplicationSampleRecordProcessor();
}
}
pom.xml:-
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>KinesiSampleApplication</groupId>
<artifactId>www.intellyzen.com</artifactId>
<version>0.0.1-SNAPSHOT</version>
<packaging>jar</packaging>
<name>www.intellyzen.com</name>
<url>http://maven.apache.org</url>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
</properties>
<dependencies>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>3.8.1</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>software.amazon.kinesis</groupId>
<artifactId>amazon-kinesis-client</artifactId>
<version>2.2.0</version>
</dependency>
<!-- https://mvnrepository.com/artifact/com.amazonaws/aws-java-sdk -->
<dependency>
<groupId>com.amazonaws</groupId>
<artifactId>aws-java-sdk-kinesis</artifactId>
<version>1.11.551</version>
</dependency>
<!-- https://mvnrepository.com/artifact/com.amazonaws/amazon-kinesis-client -->
<dependency>
<groupId>com.amazonaws</groupId>
<artifactId>amazon-kinesis-client</artifactId>
<version>1.10.0</version>
</dependency>
<!-- https://mvnrepository.com/artifact/com.fasterxml.jackson.dataformat/jackson-dataformat-cbor -->
<dependency>
<groupId>com.fasterxml.jackson.dataformat</groupId>
<artifactId>jackson-dataformat-cbor</artifactId>
<version>2.9.8</version>
</dependency>
</dependencies>
</project>
Your use case -- "I am trying to read data (records) from the stream"
You can find all AWS Java V2 examples here: https://github.com/awsdocs/aws-doc-sdk-examples/tree/master/javav2/example_code/kinesis
Here is a solution using the AWS Kinesis Java V2 API:
package com.example.kinesis;
//snippet-start:[kinesis.java2.getrecord.import]
import software.amazon.awssdk.core.SdkBytes;
import software.amazon.awssdk.regions.Region;
import software.amazon.awssdk.services.kinesis.KinesisClient;
import software.amazon.awssdk.services.kinesis.model.*;
import java.util.ArrayList;
import java.util.List;
//snippet-end:[kinesis.java2.getrecord.import]
/**
* Demonstrates how to read data from a Kinesis Data Stream. Before running this Java code example, populate a Data Stream
* by running the StockTradesWriter example. That example populates a Data Stream that you can then use for this example.
*/
public class GetRecords {
public static void main(String[] args) {
// snippet-start:[kinesis.java2.getrecord.main]
Region region = Region.US_EAST_1;
KinesisClient kinesisClient = KinesisClient.builder()
.region(region)
.build();
getStockTrades(kinesisClient);
}
private static void getStockTrades(KinesisClient kinesisClient) {
String shardIterator;
String lastShardId = null;
// Retrieve the Shards from a Stream
DescribeStreamRequest describeStreamRequest = DescribeStreamRequest.builder()
.streamName("StockTrade")
.build();
List<Shard> shards = new ArrayList<>();
DescribeStreamResponse streamRes;
do {
// describeStreamRequest.exclusiveStartShardId(lastShardId);
streamRes = kinesisClient.describeStream(describeStreamRequest);
shards.addAll(streamRes.streamDescription().shards());
if (shards.size() > 0) {
lastShardId = shards.get(shards.size() - 1).shardId();
}
} while (streamRes.streamDescription().hasMoreShards());
GetShardIteratorRequest itReq = GetShardIteratorRequest.builder()
.streamName("StockTrade")
.shardIteratorType("TRIM_HORIZON")
.shardId(shards.get(0).shardId())
.build();
GetShardIteratorResponse shardIteratorResult = kinesisClient.getShardIterator(itReq);
shardIterator = shardIteratorResult.shardIterator();
// Continuously read data records from shard.
List<Record> records;
while (true) {
// Create new GetRecordsRequest with existing shardIterator.
// Set maximum records to return to 1000.
GetRecordsRequest recordsRequest = GetRecordsRequest.builder()
.shardIterator(shardIterator)
.limit(1000)
.build();
GetRecordsResponse result = kinesisClient.getRecords(recordsRequest);
// Put result into record list. Result may be empty.
records = result.records();
// Print records
for (Record record : records) {
SdkBytes byteBuffer = record.data();
System.out.println(String.format("Seq No: %s - %s", record.sequenceNumber(),
new String(byteBuffer.asByteArray())));
}
try {
Thread.sleep(1000);
} catch (InterruptedException exception) {
throw new RuntimeException(exception);
}
shardIterator = result.nextShardIterator();
}
// snippet-end:[kinesis.java2.getrecord.main]
}
}
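Since the original records are plain JSON, the payload can be parsed instead of being fed into the substring("testData-".length()) logic from the sample (which is what throws the StringIndexOutOfBoundsException). A minimal sketch, assuming Jackson's jackson-databind is on the classpath; the helper class name is just for illustration:
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import software.amazon.awssdk.services.kinesis.model.Record;

public class RecordJsonDecoder {
    private static final ObjectMapper MAPPER = new ObjectMapper();

    /** Decodes a V2 record's payload as UTF-8 and parses it as a JSON tree. */
    static JsonNode decode(Record record) throws Exception {
        String json = record.data().asUtf8String(); // SdkBytes -> UTF-8 string
        return MAPPER.readTree(json);               // throws if the payload is not valid JSON
    }
}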
Related
I am trying to run a basic example of a PUSH query using the ksqlDB Java client:
public class Main {
public static String KSQLDB_SERVER_HOST = "localhost";
public static int KSQLDB_SERVER_HOST_PORT = 8088;
public static void main(String[] args) throws ExecutionException, InterruptedException {
ClientOptions options = ClientOptions.create()
.setHost(KSQLDB_SERVER_HOST)
.setPort(KSQLDB_SERVER_HOST_PORT);
Client client = Client.create(options);
client.streamQuery("SELECT * FROM users EMIT CHANGES;")
.thenAccept(streamedQueryResult -> {
System.out.println("Query has started. Query ID: " + streamedQueryResult.queryID());
RowSubscriber subscriber = new RowSubscriber();
streamedQueryResult.subscribe(subscriber);
}).exceptionally(e -> {
System.out.println("Request failed: " + e);
return null;
});
client.close();
}
}
I run the Confluent environment using the docker-compose file from
https://github.com/confluentinc/cp-all-in-one/blob/6.0.0-post/cp-all-in-one/docker-compose.yml and created a users topic with data.
But I get an exception inside the io.netty.resolver.AddressResolverGroup class, in the getResolver(final EventExecutor executor) method:
Request failed: java.util.concurrent.CompletionException:
java.lang.IllegalStateException: executor not accepting a task
But everything works fine when I consume the same query synchronously:
StreamedQueryResult streamedQueryResult = client.streamQuery("SELECT * FROM users EMIT CHANGES;").get();
for (int i = 0; i < 10; i++) {
// Block until a new row is available
Row row = streamedQueryResult.poll();
if (row != null) {
System.out.println("Row: " + row.values());
}
}
You should not close the client connection while streaming.
Remove the client.close(); call to test.
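As a minimal sketch of what that could look like (assuming the same Client and RowSubscriber from the question), block the main thread with a latch and close the client only once the subscription is finished:
CountDownLatch done = new CountDownLatch(1); // java.util.concurrent.CountDownLatch

client.streamQuery("SELECT * FROM users EMIT CHANGES;")
    .thenAccept(streamedQueryResult -> {
        System.out.println("Query has started. Query ID: " + streamedQueryResult.queryID());
        streamedQueryResult.subscribe(new RowSubscriber());
    }).exceptionally(e -> {
        System.out.println("Request failed: " + e);
        done.countDown(); // nothing to wait for if the request failed
        return null;
    });

done.await();   // release the latch from your subscriber (e.g. in onComplete/onError)
client.close(); // only close after streaming is finished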
I have two scenarios when playing an mp4 file on iOS devices.
MP4 access via Nginx works:
Put the mp4 file in /html and use the following configuration; then iOS devices and the Chrome browser can play mp4 files properly.
server {
listen 127.0.0.1:80 default_server;
location ~ ^/storage\/*.mp4 {
root html;
}
}
MP4 access via Tomcat Spring MVC does not work: when I request the file through a Spring MVC RESTful API that returns a ResponseEntity containing a Resource object, the Chrome browser receives and plays the mp4 properly, but iOS devices do not.
@GetMapping(value = "/storage/{filename:.+}")
@ResponseBody
public ResponseEntity<org.springframework.core.io.Resource> accessStorageFile(HttpServletResponse response, @PathVariable String filename) throws IOException {
org.springframework.core.io.Resource resource = storageUtil.loadAsResource(filename);
InputStream inputStream = resource.getInputStream();
InputStreamResource inputStreamResource = new InputStreamResource(inputStream);
String contentType = FileTypeMap.getDefaultFileTypeMap().getContentType(resource.getFile());
contentType = resource.getFilename().contains(".mp4") ? "video/mp4" : contentType;
response.setContentType(contentType);
HttpHeaders responseHeaders = new HttpHeaders();
responseHeaders.setContentType(MediaType.valueOf(contentType));
responseHeaders.setContentLength(resource.getFile().length());
return new ResponseEntity<>(inputStreamResource, responseHeaders, HttpStatus.OK);
}
import java.io.BufferedInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.attribute.FileTime;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.ZoneOffset;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.springframework.util.StringUtils;
/**
 * @author David 1
 */
public class MultipartFileSender {
private static final int DEFAULT_BUFFER_SIZE = 20480; // ..bytes = 20KB.
private static final long DEFAULT_EXPIRE_TIME = 604800000L; // ..ms = 1 week.
private static final String MULTIPART_BOUNDARY = "MULTIPART_BYTERANGES";
Path filepath;
HttpServletRequest request;
HttpServletResponse response;
public MultipartFileSender() {
}
public static MultipartFileSender fromPath(Path path) {
return new MultipartFileSender().setFilepath(path);
}
public static MultipartFileSender fromFile(File file) {
return new MultipartFileSender().setFilepath(file.toPath());
}
public static MultipartFileSender fromURIString(String uri) {
return new MultipartFileSender().setFilepath(Paths.get(uri));
}
//** internal setter **//
private MultipartFileSender setFilepath(Path filepath) {
this.filepath = filepath;
return this;
}
public MultipartFileSender with(HttpServletRequest httpRequest) {
request = httpRequest;
return this;
}
public MultipartFileSender with(HttpServletResponse httpResponse) {
response = httpResponse;
return this;
}
public void serveResource() throws Exception {
if (response == null || request == null) {
return;
}
if (!Files.exists(filepath)) {
System.out.println("File doesn't exist at URI : {" + filepath.toAbsolutePath().toString() + "}");
response.sendError(HttpServletResponse.SC_NOT_FOUND);
return;
}
Long length = Files.size(filepath);
String fileName = filepath.getFileName().toString();
FileTime lastModifiedObj = Files.getLastModifiedTime(filepath);
if (StringUtils.isEmpty(fileName) || lastModifiedObj == null) {
response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
return;
}
long lastModified = LocalDateTime.ofInstant(lastModifiedObj.toInstant(),
ZoneId.of(ZoneOffset.systemDefault().getId())).toEpochSecond(ZoneOffset.UTC);
String contentType = "video/mp4";
// Validate request headers for caching ---------------------------------------------------
// If-None-Match header should contain "*" or ETag. If so, then return 304.
String ifNoneMatch = request.getHeader("If-None-Match");
if (ifNoneMatch != null && HttpUtils.matches(ifNoneMatch, fileName)) {
response.setHeader("ETag", fileName); // Required in 304.
response.sendError(HttpServletResponse.SC_NOT_MODIFIED);
return;
}
// If-Modified-Since header should be greater than LastModified. If so, then return 304.
// This header is ignored if any If-None-Match header is specified.
long ifModifiedSince = request.getDateHeader("If-Modified-Since");
if (ifNoneMatch == null && ifModifiedSince != -1 && ifModifiedSince + 1000 > lastModified) {
response.setHeader("ETag", fileName); // Required in 304.
response.sendError(HttpServletResponse.SC_NOT_MODIFIED);
return;
}
// Validate request headers for resume ----------------------------------------------------
// If-Match header should contain "*" or ETag. If not, then return 412.
String ifMatch = request.getHeader("If-Match");
if (ifMatch != null && !HttpUtils.matches(ifMatch, fileName)) {
response.sendError(HttpServletResponse.SC_PRECONDITION_FAILED);
return;
}
// If-Unmodified-Since header should be greater than LastModified. If not, then return 412.
long ifUnmodifiedSince = request.getDateHeader("If-Unmodified-Since");
if (ifUnmodifiedSince != -1 && ifUnmodifiedSince + 1000 <= lastModified) {
response.sendError(HttpServletResponse.SC_PRECONDITION_FAILED);
return;
}
// Validate and process range -------------------------------------------------------------
// Prepare some variables. The full Range represents the complete file.
Range full = new Range(0, length - 1, length);
List<Range> ranges = new ArrayList<>();
// Validate and process Range and If-Range headers.
String range = request.getHeader("Range");
if (range != null) {
// Range header should match format "bytes=n-n,n-n,n-n...". If not, then return 416.
if (!range.matches("^bytes=\\d*-\\d*(,\\d*-\\d*)*$")) {
response.setHeader("Content-Range", "bytes */" + length); // Required in 416.
response.sendError(HttpServletResponse.SC_REQUESTED_RANGE_NOT_SATISFIABLE);
return;
}
String ifRange = request.getHeader("If-Range");
if (ifRange != null && !ifRange.equals(fileName)) {
try {
long ifRangeTime = request.getDateHeader("If-Range"); // Throws IAE if invalid.
if (ifRangeTime != -1) {
ranges.add(full);
}
} catch (IllegalArgumentException ignore) {
ranges.add(full);
}
}
// If any valid If-Range header, then process each part of byte range.
if (ranges.isEmpty()) {
for (String part : range.substring(6).split(",")) {
// Assuming a file with length of 100, the following examples returns bytes at:
// 50-80 (50 to 80), 40- (40 to length=100), -20 (length-20=80 to length=100).
long start = Range.sublong(part, 0, part.indexOf("-"));
long end = Range.sublong(part, part.indexOf("-") + 1, part.length());
if (start == -1) {
start = length - end;
end = length - 1;
} else if (end == -1 || end > length - 1) {
end = length - 1;
}
// Check if Range is syntactically valid. If not, then return 416.
if (start > end) {
response.setHeader("Content-Range", "bytes */" + length); // Required in 416.
response.sendError(HttpServletResponse.SC_REQUESTED_RANGE_NOT_SATISFIABLE);
return;
}
// Add range.
ranges.add(new Range(start, end, length));
}
}
}
// Prepare and initialize response --------------------------------------------------------
// Get content type by file name and set content disposition.
String disposition = "inline";
// If content type is unknown, then set the default value.
// For all content types, see: http://www.w3schools.com/media/media_mimeref.asp
// To add new content types, add new mime-mapping entry in web.xml.
if (contentType == null) {
contentType = "application/octet-stream";
} else if (!contentType.startsWith("image")) {
// Else, expect for images, determine content disposition. If content type is supported by
// the browser, then set to inline, else attachment which will pop a 'save as' dialogue.
String accept = request.getHeader("Accept");
disposition = accept != null && HttpUtils.accepts(accept, contentType) ? "inline" : "attachment";
}
System.out.println("Content-Type : {" + contentType + "}");
// Initialize response.
response.reset();
response.setBufferSize(DEFAULT_BUFFER_SIZE);
response.setHeader("Content-Type", contentType);
response.setHeader("Content-Disposition", disposition + ";filename=\"" + fileName + "\"");
System.out.println("Content-Disposition : {" + disposition + "}");
response.setHeader("Accept-Ranges", "bytes");
response.setHeader("ETag", fileName);
response.setDateHeader("Last-Modified", lastModified);
response.setDateHeader("Expires", System.currentTimeMillis() + DEFAULT_EXPIRE_TIME);
// Send requested file (part(s)) to client ------------------------------------------------
// Prepare streams.
try (InputStream input = new BufferedInputStream(Files.newInputStream(filepath));
OutputStream output = response.getOutputStream()) {
if (ranges.isEmpty() || ranges.get(0) == full) {
// Return full file.
System.out.println("Return full file");
response.setContentType(contentType);
response.setHeader("Content-Range", "bytes " + full.start + "-" + full.end + "/" + full.total);
response.setHeader("Content-Length", String.valueOf(full.length));
Range.copy(input, output, length, full.start, full.length);
} else if (ranges.size() == 1) {
// Return single part of file.
Range r = ranges.get(0);
System.out.println("Return 1 part of file : from ({" + r.start + "}) to ({" + r.end + "})");
response.setContentType(contentType);
response.setHeader("Content-Range", "bytes " + r.start + "-" + r.end + "/" + r.total);
response.setHeader("Content-Length", String.valueOf(r.length));
response.setStatus(HttpServletResponse.SC_PARTIAL_CONTENT); // 206.
// Copy single part range.
Range.copy(input, output, length, r.start, r.length);
} else {
// Return multiple parts of file.
response.setContentType("multipart/byteranges; boundary=" + MULTIPART_BOUNDARY);
response.setStatus(HttpServletResponse.SC_PARTIAL_CONTENT); // 206.
// Cast back to ServletOutputStream to get the easy println methods.
ServletOutputStream sos = (ServletOutputStream) output;
// Copy multi part range.
for (Range r : ranges) {
System.out.println("Return multi part of file : from ({" + r.start + "}) to ({" + r.end + "})");
// Add multipart boundary and header fields for every range.
sos.println();
sos.println("--" + MULTIPART_BOUNDARY);
sos.println("Content-Type: " + contentType);
sos.println("Content-Range: bytes " + r.start + "-" + r.end + "/" + r.total);
// Copy single part range of multi part range.
Range.copy(input, output, length, r.start, r.length);
}
// End with multipart boundary.
sos.println();
sos.println("--" + MULTIPART_BOUNDARY + "--");
}
}
}
private static class Range {
long start;
long end;
long length;
long total;
/**
* Construct a byte range.
*
* @param start Start of the byte range.
* @param end End of the byte range.
* @param total Total length of the byte source.
*/
public Range(long start, long end, long total) {
this.start = start;
this.end = end;
this.length = end - start + 1;
this.total = total;
}
public static long sublong(String value, int beginIndex, int endIndex) {
String substring = value.substring(beginIndex, endIndex);
return (substring.length() > 0) ? Long.parseLong(substring) : -1;
}
private static void copy(InputStream input, OutputStream output, long inputSize, long start, long length) throws IOException {
byte[] buffer = new byte[DEFAULT_BUFFER_SIZE];
int read;
if (inputSize == length) {
// Write full range.
while ((read = input.read(buffer)) > 0) {
output.write(buffer, 0, read);
output.flush();
}
} else {
input.skip(start);
long toRead = length;
while ((read = input.read(buffer)) > 0) {
if ((toRead -= read) > 0) {
output.write(buffer, 0, read);
output.flush();
} else {
output.write(buffer, 0, (int) toRead + read);
output.flush();
break;
}
}
}
}
}
private static class HttpUtils {
/**
* Returns true if the given accept header accepts the given value.
*
* @param acceptHeader The accept header.
* @param toAccept The value to be accepted.
* @return True if the given accept header accepts the given value.
*/
public static boolean accepts(String acceptHeader, String toAccept) {
String[] acceptValues = acceptHeader.split("\\s*(,|;)\\s*");
Arrays.sort(acceptValues);
return Arrays.binarySearch(acceptValues, toAccept) > -1
|| Arrays.binarySearch(acceptValues, toAccept.replaceAll("/.*$", "/*")) > -1
|| Arrays.binarySearch(acceptValues, "*/*") > -1;
}
/**
* Returns true if the given match header matches the given value.
*
* @param matchHeader The match header.
* @param toMatch The value to be matched.
* @return True if the given match header matches the given value.
*/
public static boolean matches(String matchHeader, String toMatch) {
String[] matchValues = matchHeader.split("\\s*,\\s*");
Arrays.sort(matchValues);
return Arrays.binarySearch(matchValues, toMatch) > -1
|| Arrays.binarySearch(matchValues, "*") > -1;
}
} }
Use it in your controller, and make sure you are not using @RestController; you should use @Controller and call the class above from a void handler method:
@RequestMapping(method = RequestMethod.GET, value = "/getFile")
public void getFile(HttpServletRequest httpRequest, HttpServletResponse httpResponse) throws Exception {
MultipartFileSender.fromPath(Paths.get("D:\\8561523973120206.mp4"))
.with(httpRequest)
.with(httpResponse)
.serveResource();
}
I am having some trouble with my Jersey client (2.14).
I send requests to my server (Jetty 8.1) every 100 ms. For 10 seconds there are no problems, but then every 10 seconds I have a request that remains blocked for 2 seconds...
The trouble doesn't seem to come from Jetty...
Has anybody seen a similar issue?
private class PostJob implements Runnable {
@Override
public void run() {
while (start) {
try {
final HttpData take = daQueu.take();
final String path = take.path();
final Entity<Object> arg = take.arg();
for (final Map.Entry<String, ClientHandler> e : clients.entrySet()) {
final ClientHandler clientHandler = e.getValue();
long currentTimeMillis = System.currentTimeMillis();
Invocation.Builder request = null;
if (REQUEST_CACHE.containsKey(path)) {
request = REQUEST_CACHE.get(path);
} else {
LOGGER.debug("build new request");
request = clientHandler.client.target(path).request(MediaType.APPLICATION_JSON_TYPE);
REQUEST_CACHE.put(path, request);
}
Response post = request.post(arg);
LOGGER.debug("Execution delay for sending: {}", System.currentTimeMillis() - currentTimeMillis);
post.close();
}
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
}
I'm pretty sure that it doesn't come from the Jetty server... (but...).
My server is embedded in a Karaf container and is started by the Pax Web feature.
Below is the part of my server code which registers my servlets:
final ResourceConfig restServletConf = new ResourceConfig()
.register(JacksonFeature.class).register(MultiPartFeature.class).register(SseFeature.class).register(JsonProcessingFeature.class);
restServletConf.registerInstances(restResources);
final HttpServlet restServlet = new ServletContainer(restServletConf);
try {
webContainer.registerServlet(conf.servletPath, restServlet, null, restService);
logService.log(LogService.LOG_INFO, "Serving JAX-RS resources from " + conf.servletPath);
// Servlet filters must be started after the REST servlet
// (constraint from PAX-Web's WebContainer API).
restService.start();
} catch (Exception e) {
logService.log(LogService.LOG_ERROR, "Failed to start JAX-RS servlet", e);
}
In my task I have to invoke the camera on a button click, take a picture, save it, and display the image on the same screen. I have tried it and succeeded in the emulator, but it is not working on a real device; I am getting some errors. I have tried a lot but cannot find the issue. Moreover, it works perfectly in the 9700 emulator but shows an error on the 9500.
public class CameraScreen extends MainScreen implements FieldChangeListener
{
/** The camera's video controller */
private VideoControl _videoControl;
private Field _videoField;
private EncodingProperties[] _encodings;
private int _indexOfEncoding = 0;
private static String FILE_NAME = System.getProperty("fileconn.dir.photos")+"IMAGE"; //"file:///SDCard/" + "myphotos/" + "IMAGE";//
private static String EXTENSION = ".bmp";
private static int _counter;
int flag = 0;
BitmapField imageField = new BitmapField();
HorizontalFieldManager menuBar = new HorizontalFieldManager(Field.USE_ALL_WIDTH);
VerticalFieldManager main_vfm = new VerticalFieldManager();
VerticalFieldManager camera_vfm = new VerticalFieldManager();
VerticalFieldManager image_vfm = new VerticalFieldManager();
ButtonField bt = new ButtonField("Click",ButtonField.CONSUME_CLICK);
ButtonField front_bt = new ButtonField("Front",ButtonField.CONSUME_CLICK);
ButtonField back_bt = new ButtonField("Back",ButtonField.CONSUME_CLICK);
ButtonField side1_bt = new ButtonField("Side 1",ButtonField.CONSUME_CLICK);
ButtonField side2_bt = new ButtonField("Side 2",ButtonField.CONSUME_CLICK);
public CameraScreen()
{
setTitle("First Screen");
bt.setChangeListener(this);
front_bt.setChangeListener(this);
back_bt.setChangeListener(this);
side1_bt.setChangeListener(this);
side2_bt.setChangeListener(this);
image_vfm.add(menuBar);
menuBar.add(front_bt);
menuBar.add(back_bt);
menuBar.add(side1_bt);
menuBar.add(side2_bt);
image_vfm.add(bt);
try {
Bitmap image = Bitmap.createBitmapFromBytes( readFile(),0, -1, 5 );
imageField.setBitmap(image);
image_vfm.add(imageField);
} catch(Exception e) {
System.out.println(e.toString());
}
main_vfm.add(image_vfm);
add(main_vfm);
front_bt.setFocus();
// Initialize the camera object and video field
initializeCamera();
// Initialize the list of possible encodings
initializeEncodingList();
// If the field was constructed successfully, create the UI
if(_videoField != null)
{
createUI();
}
// If not, display an error message to the user
else
{
camera_vfm.add( new RichTextField( "Error connecting to camera." ) );
}
}
/**
* Takes a picture with the selected encoding settings
*/
public void takePicture()
{
try
{
// A null encoding indicates that the camera should
// use the default snapshot encoding.
String encoding = null;
if( _encodings != null )
{
// Use the user-selected encoding
encoding = _encodings[_indexOfEncoding].getFullEncoding();
}
// Retrieve the raw image from the VideoControl and
// create a screen to display the image to the user.
createImageScreen( _videoControl.getSnapshot( encoding ) );
}
catch(Exception e)
{
home.errorDialog("ERROR " + e.getClass() + ": " + e.getMessage());
}
}
/**
* Prevent the save dialog from being displayed
* @see net.rim.device.api.ui.container.MainScreen#onSavePrompt()
*/
protected boolean onSavePrompt()
{
return true;
}
/**
* Initializes the Player, VideoControl and VideoField
*/
private void initializeCamera()
{
try
{
// Create a player for the Blackberry's camera
Player player = Manager.createPlayer( "capture://video" );
// Set the player to the REALIZED state (see Player javadoc)
player.realize();
// Grab the video control and set it to the current display
_videoControl = (VideoControl)player.getControl( "VideoControl" );
if (_videoControl != null)
{
// Create the video field as a GUI primitive (as opposed to a
// direct video, which can only be used on platforms with
// LCDUI support.)
_videoField = (Field) _videoControl.initDisplayMode (VideoControl.USE_GUI_PRIMITIVE, "net.rim.device.api.ui.Field");
_videoControl.setDisplayFullScreen(true);
//_videoControl.setDisplaySize(50, 50);
_videoControl.setVisible(true);
}
// Set the player to the STARTED state (see Player javadoc)
player.start();
}
catch(Exception e)
{
home.errorDialog("ERROR " + e.getClass() + ": " + e.getMessage());
}
}
/**
* Initialize the list of encodings
*/
private void initializeEncodingList()
{
try
{
// Retrieve the list of valid encodings
String encodingString = System.getProperty("video.snapshot.encodings");
// Extract the properties as an array of word
String[] properties = StringUtilities.stringToKeywords(encodingString);
// The list of encodings
Vector encodingList = new Vector();
//Strings representing the four properties of an encoding as
//returned by System.getProperty().
String encoding = "encoding";
String width = "width";
String height = "height";
String quality = "quality";
EncodingProperties temp = null;
for(int i = 0; i < properties.length ; ++i)
{
if( properties[i].equals(encoding))
{
if(temp != null && temp.isComplete())
{
// Add a new encoding to the list if it has been
// properly set.
encodingList.addElement( temp );
}
temp = new EncodingProperties();
// Set the new encoding's format
++i;
temp.setFormat(properties[i]);
}
else if( properties[i].equals(width))
{
// Set the new encoding's width
++i;
temp.setWidth(properties[i]);
}
else if( properties[i].equals(height))
{
// Set the new encoding's height
++i;
temp.setHeight(properties[i]);
}
else if( properties[i].equals(quality))
{
// Set the new encoding's quality
++i;
temp.setQuality(properties[i]);
}
}
// If there is a leftover complete encoding, add it.
if(temp != null && temp.isComplete())
{
encodingList.addElement( temp );
}
// Convert the Vector to an array for later use
_encodings = new EncodingProperties[ encodingList.size() ];
encodingList.copyInto((Object[])_encodings);
}
catch (Exception e)
{
// Something is wrong, indicate that there are no encoding options
_encodings = null;
home.errorDialog(e.toString());
}
}
/**
* Adds the VideoField to the screen
*/
private void createUI()
{
// Add the video field to the screen
camera_vfm.add(_videoField);
}
/**
* Create a screen used to display a snapshot
* @param raw A byte array representing an image
*/
private void createImageScreen( byte[] raw )
{
main_vfm.replace(camera_vfm, image_vfm);
fieldChanged(raw);
Bitmap image1 = Bitmap.createBitmapFromBytes( readFile(),0, -1, 5 );
try{
if(flag == 1){
}
else{
image_vfm.delete(imageField);
}
imageField.setBitmap(image1);
image_vfm.add(imageField);
}
catch(Exception e){System.out.println(e.toString());}
}
private byte[] readFile() {
byte[] result = null;
FileConnection fconn = null;
try {
fconn = (FileConnection)Connector.open(FILE_NAME + "_front" + EXTENSION);
} catch (IOException e) {
System.out.print("Error opening file");
}
if (!fconn.exists()) {
//Dialog.inform("file not exist");
} else {
InputStream in = null;
ByteVector bytes = new ByteVector();
try {
in = fconn.openInputStream();
} catch (IOException e) {
System.out.print("Error opening input stream");
}
try {
int c = in.read();
while (-1 != c) {
bytes.addElement((byte) c);
c = in.read();
}
result = bytes.getArray();
} catch (IOException e) {
System.out.print("Error reading input stream");
}
try {
fconn.close();
} catch (IOException e) {
System.out.print("Error closing file");
}
}
return result;
}
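/**
 * Saves the raw snapshot bytes to the "_front" image file, replacing any existing copy.
 */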
public void fieldChanged( final byte[] _raw )
{
try
{
flag ++;
// Create the connection to a file that may or
// may not exist.
FileConnection file = (FileConnection)Connector.open(FILE_NAME + "_front" + EXTENSION);
// If the file already exists, delete it and reopen the
// connection so the image is written to a fresh file.
if (file.exists()) {
file.delete();
file.close();
file = (FileConnection) Connector.open(FILE_NAME + "_front" + EXTENSION);
}
//FileConnection file_temp = (FileConnection)Connector.open(FILE_NAME + "tempimg" + EXTENSION);
//file_temp.delete();
// We know the file doesn't exist yet, so create it
file.create();
// Write the image to the file
OutputStream out = file.openOutputStream();
out.write(_raw);
// Close the connections
//out.close();
file.close();
//Dialog.inform( "Saved to " + FILE_NAME + "_front" + EXTENSION );
}
catch(Exception e)
{
home.errorDialog("ERROR " + e.getClass() + ": " + e.getMessage());
Dialog.inform( "File not saved this time");
}
}
/**
* Sets the index of the encoding in the 'encodingList' Vector
* @param index The index of the encoding in the 'encodingList' Vector
*/
public void setIndexOfEncoding(int index)
{
_indexOfEncoding = index;
}
/**
* @see net.rim.device.api.ui.Screen#invokeAction(int)
*/
protected boolean invokeAction(int action)
{
boolean handled = super.invokeAction(action);
if(!handled)
{
switch(action)
{
case ACTION_INVOKE: // Trackball click
{
takePicture();
return true;
}
}
}
return handled;
}
public void fieldChanged(Field field, int context) {
// Handle button presses: return to the camera view or navigate to another screen.
srn2 screen2 = new srn2();
srn3 screen3 = new srn3();
srn4 screen4 = new srn4();
if(field==bt)
{
main_vfm.replace(image_vfm, camera_vfm);
}
if(field==back_bt)
{
UiApplication.getUiApplication().popScreen(UiApplication.getUiApplication().getActiveScreen());
UiApplication.getUiApplication().pushScreen(screen2);
}
if(field==side1_bt)
{
UiApplication.getUiApplication().popScreen(UiApplication.getUiApplication().getActiveScreen());
UiApplication.getUiApplication().pushScreen(screen3);
}
if(field==side2_bt)
{
UiApplication.getUiApplication().popScreen(UiApplication.getUiApplication().getActiveScreen());
UiApplication.getUiApplication().pushScreen(screen4);
}
}
}
Note: This error is displayed first: "javax.microedition.media.MediaException: There is already another active Player. Call Player.close() on the existing Player to free up the resources." The camera then opens, and when I try to take a picture this error is displayed: "Error Class Java.lang.ArrayIndexOutOfBoundsException: Index 0>=0"
After _videoControl.getSnapshot( encoding ) has been called, you need to close the player (i.e. call player.close()). That's exactly what the exception is telling you.
However, this method of taking images is highly unreliable - you will not be able to use it on every BB device model. I don't know why RIM put it in the SDK samples; by doing that RIM pushes developers in the wrong direction. As alishaik786 mentions, the proper way to take images on BB is to use Invoke.invokeApplication(Invoke.APP_TYPE_CAMERA, new CameraArguments()) with a FileSystemJournalListener implementation. Just search StackOverflow for the implementation details. I vaguely recall the implementation is painful (like many other parts of BB), but once done it will work on any device.
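In case it helps, here is a rough, untested sketch of that approach. The class name, the ".jpg" check, and the way the new image is handled are all assumptions for illustration, not code from your project:
import net.rim.blackberry.api.invoke.CameraArguments;
import net.rim.blackberry.api.invoke.Invoke;
import net.rim.device.api.io.file.FileSystemJournal;
import net.rim.device.api.io.file.FileSystemJournalEntry;
import net.rim.device.api.io.file.FileSystemJournalListener;
import net.rim.device.api.ui.UiApplication;
public class NativeCameraHelper implements FileSystemJournalListener
{
    private long _lastUSN = FileSystemJournal.getNextUSN();
    public void openNativeCamera()
    {
        // Start listening for file-system changes, then launch the built-in camera app.
        UiApplication.getUiApplication().addFileSystemJournalListener(this);
        Invoke.invokeApplication(Invoke.APP_TYPE_CAMERA, new CameraArguments());
    }
    public void fileJournalChanged()
    {
        long usn = FileSystemJournal.getNextUSN();
        for (long i = usn - 1; i >= _lastUSN; --i)
        {
            FileSystemJournalEntry entry = FileSystemJournal.getEntry(i);
            if (entry == null)
            {
                break;
            }
            String path = entry.getPath();
            // A newly added .jpg is taken to be the picture the user just shot.
            if (entry.getEvent() == FileSystemJournalEntry.FILE_ADDED
                    && path != null && path.endsWith(".jpg"))
            {
                _lastUSN = usn;
                // ...load the image from 'path' and close the camera application here...
                return;
            }
        }
        _lastUSN = usn;
    }
}
The journal listener fires for every file-system change, so the USN bookkeeping is what lets you pick out only the file the camera just wrote.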
You got two errors:
1. "javax.microedition.media.MediaException: There is already another active Player. Call Player.close()"
This exception is thrown if you try to open the camera (player) while another instance of the camera is already open. In your code, you need to declare the player object at class level and you must close the player after taking the snapshot (you also need to close the player when you push from one screen to another).
2. "Error Class Java.lang.ArrayIndexOutOfBoundsException: Index 0>=0"
This error may occur when you access the encoding array while the encoding array's size is zero.
You can check this by using _videoControl.getSnapshot(null), which takes the snapshot in the default encoding.
So first verify these issues and reply to me.
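To illustrate: the body of takePicture() is not shown in the question, so this is only a guess at its shape, assuming player is a class-level field:
private void takePicture()
{
    try
    {
        // Passing null lets the device pick its default snapshot encoding,
        // so we never index into an empty encoding array.
        byte[] raw = _videoControl.getSnapshot( null );
        createImageScreen( raw );
    }
    catch (Exception e)
    {
        home.errorDialog(e.toString());
    }
    finally
    {
        // Release the camera so the next Player can be created without
        // hitting "There is already another active Player".
        if (player != null)
        {
            player.close();
            player = null;
        }
    }
}
Closing the player in a finally block ensures the camera is released even if the snapshot or the screen change throws.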
I'm using SSHExec ant task to connect to a remote host and I depend on the environment variables that are set on the remote host in order to be able to successfully execute some commands.
<sshexec host="somehost"
username="${username}"
password="${password}"
command="set"/>
When I use the task, the environment variables that are output are not the same as the ones I get when I log in using an SSH client.
How can I make the environment variables of the remote host available for the session?
Actually, there is something you can do about the fact that it doesn't start a shell. Use the following:
<sshexec command="/bin/bash -l yourScript.sh" .../>
Using /bin/bash -l will start a login shell and then execute your script within that shell. It is exactly as if you had a version of sshexec that properly starts a login shell. It has to be a script. If you want to run a single executable command, you can do this:
<sshexec command="/bin/bash -l -c 'echo $CATALINA_HOME'" .../>
I've found out that the current SSHExec task implementation uses JSch's ChannelExec (remote execution of commands) instead of a ChannelShell (remote shell) as the connection channel.
That means that, apparently, with JSch's current implementation a ChannelExec doesn't load the environment variables.
I'm still not sure whether this is a limitation of the protocol or of the API.
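To make the difference concrete, here is a minimal standalone JSch sketch (host, user, and password are placeholders, and this is not the Ant task's code - just the two channel types side by side):
import java.io.ByteArrayInputStream;
import com.jcraft.jsch.ChannelExec;
import com.jcraft.jsch.ChannelShell;
import com.jcraft.jsch.JSch;
import com.jcraft.jsch.Session;
public class ChannelComparison {
    public static void main(String[] args) throws Exception {
        Session session = new JSch().getSession("user", "somehost", 22);
        session.setPassword("secret");
        session.setConfig("StrictHostKeyChecking", "no");
        session.connect();
        // "exec" channel: runs a single command with no login shell, so the
        // profile scripts are normally not sourced and the environment is sparse.
        ChannelExec exec = (ChannelExec) session.openChannel("exec");
        exec.setCommand("set");
        exec.setOutputStream(System.out, true); // true = do not close System.out
        exec.connect();
        while (!exec.isClosed()) {
            Thread.sleep(100);
        }
        exec.disconnect();
        // "shell" channel: starts the remote user's shell (typically a login shell),
        // so commands written to its input stream see the environment an SSH client would see.
        ChannelShell shell = (ChannelShell) session.openChannel("shell");
        shell.setInputStream(new ByteArrayInputStream("set\nexit\n".getBytes()));
        shell.setOutputStream(System.out, true);
        shell.connect();
        while (!shell.isClosed()) {
            Thread.sleep(100);
        }
        shell.disconnect();
        session.disconnect();
    }
}
Running both against the same host should show the "shell" channel printing the profile-provided environment variables that the "exec" channel misses.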
The conclusion is that, for now, there's no solution to the problem unless you implement your own Ant task.
A working draft of how it would be:
package org.apache.tools.ant.taskdefs.optional.ssh;
import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.io.StringReader;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.Project;
import org.apache.tools.ant.types.Resource;
import org.apache.tools.ant.types.resources.FileResource;
import org.apache.tools.ant.util.FileUtils;
import org.apache.tools.ant.util.KeepAliveOutputStream;
import org.apache.tools.ant.util.TeeOutputStream;
import com.jcraft.jsch.Channel;
import com.jcraft.jsch.ChannelExec;
import com.jcraft.jsch.JSchException;
import com.jcraft.jsch.Session;
/**
* Executes a command on a remote machine via ssh.
* @since Ant 1.6 (created February 2, 2003)
*/
public class SSHExecShellSupport extends SSHBase {
private static final String COMMAND_SEPARATOR = System.getProperty("line.separator");
private static final int BUFFER_SIZE = 8192;
private static final int RETRY_INTERVAL = 500;
/** the command to execute via ssh */
private String command = null;
/** units are milliseconds, default is 0=infinite */
private long maxwait = 0;
/** for waiting for the command to finish */
private Thread thread = null;
private String outputProperty = null; // like <exec>
private File outputFile = null; // like <exec>
private boolean append = false; // like <exec>
private Resource commandResource = null;
private boolean isShellMode;
private long maxTimeWithoutAnyData = 1000*10;
private static final String TIMEOUT_MESSAGE =
"Timeout period exceeded, connection dropped.";
public long getMaxTimeWithoutAnyData() {
return maxTimeWithoutAnyData;
}
public void setMaxTimeWithoutAnyData(long maxTimeWithoutAnyData) {
this.maxTimeWithoutAnyData = maxTimeWithoutAnyData;
}
public boolean isShellMode() {
return isShellMode;
}
public void setShellMode(boolean isShellMode) {
this.isShellMode = isShellMode;
}
/**
* Constructor for SSHExecTask.
*/
public SSHExecShellSupport() {
super();
}
/**
* Sets the command to execute on the remote host.
*
* @param command The new command value
*/
public void setCommand(String command) {
this.command = command;
}
/**
* Sets a commandResource from a file
* @param f the value to use.
* @since Ant 1.7.1
*/
public void setCommandResource(String f) {
this.commandResource = new FileResource(new File(f));
}
/**
* The connection can be dropped after a specified number of
* milliseconds. This is sometimes useful when a connection may be
* flaky. Default is 0, which means "wait forever".
*
* @param timeout The new timeout value in milliseconds
*/
public void setTimeout(long timeout) {
maxwait = timeout;
}
/**
* If used, stores the output of the command to the given file.
*
* @param output The file to write to.
*/
public void setOutput(File output) {
outputFile = output;
}
/**
* Determines if the output is appended to the file given in
* <code>setOutput</code>. Default is false, that is, overwrite
* the file.
*
* @param append True to append to an existing file, false to overwrite.
*/
public void setAppend(boolean append) {
this.append = append;
}
/**
* If set, the output of the command will be stored in the given property.
*
* @param property The name of the property in which the command output
* will be stored.
*/
public void setOutputproperty(String property) {
outputProperty = property;
}
/**
* Execute the command on the remote host.
*
* @exception BuildException Most likely a network error or bad parameter.
*/
public void execute() throws BuildException {
if (getHost() == null) {
throw new BuildException("Host is required.");
}
if (getUserInfo().getName() == null) {
throw new BuildException("Username is required.");
}
if (getUserInfo().getKeyfile() == null
&& getUserInfo().getPassword() == null) {
throw new BuildException("Password or Keyfile is required.");
}
if (command == null && commandResource == null) {
throw new BuildException("Command or commandResource is required.");
}
if(isShellMode){
shellMode();
} else {
commandMode();
}
}
private void shellMode() {
final Object lock = new Object();
Session session = null;
try {
session = openSession();
final Channel channel=session.openChannel("shell");
final PipedOutputStream pipedOS = new PipedOutputStream();
PipedInputStream pipedIS = new PipedInputStream(pipedOS);
final Thread commandProducerThread = new Thread("CommandsProducerThread"){
public void run() {
BufferedReader br = null;
try {
br = new BufferedReader(new InputStreamReader(commandResource.getInputStream()));
String singleCmd;
synchronized (lock) {
lock.wait(); // waits for the reception of the very first data (before commands are issued)
while ((singleCmd = br.readLine()) != null) {
singleCmd += COMMAND_SEPARATOR;
log("cmd : " + singleCmd, Project.MSG_INFO);
pipedOS.write(singleCmd.getBytes());
lock.notify();
try {
lock.wait();
} catch (InterruptedException e) {
log(e, Project.MSG_VERBOSE);
break;
}
}
log("Finished producing commands", Project.MSG_VERBOSE);
}
} catch (IOException e) {
log(e, Project.MSG_VERBOSE);
} catch (InterruptedException e) {
log(e, Project.MSG_VERBOSE);
} finally {
FileUtils.close(br);
}
}
};
ByteArrayOutputStream out = new ByteArrayOutputStream();
final TeeOutputStream tee = new TeeOutputStream(out, new KeepAliveOutputStream(System.out));
channel.setOutputStream(tee);
channel.setExtOutputStream(tee);
channel.setInputStream(pipedIS);
channel.connect();
// Waits for the command's response data to finish arriving, then asks the producer to issue one more command.
thread = new Thread("DataReceiverThread") {
public void run() {
long lastTimeConsumedData = System.currentTimeMillis(); // initializes the watch
try {
InputStream in = channel.getInputStream();
byte[] tmp = new byte[1024];
while (true) {
if(thread == null){ // works with maxTimeout (for the whole task to complete)
break;
}
while (in.available() > 0) {
int i = in.read(tmp, 0, 1024);
lastTimeConsumedData = System.currentTimeMillis();
if (i < 0){
break;
}
tee.write(tmp, 0, i);
}
if (channel.isClosed()) {
log("exit-status: " + channel.getExitStatus(), Project.MSG_INFO);
log("channel.isEOF(): " + channel.isEOF(), Project.MSG_VERBOSE);
log("channel.isConnected(): " + channel.isConnected(), Project.MSG_VERBOSE);
throw new BuildException("Connection lost."); // NOTE: it also can happen that if one of the command are "exit" the channel will be closed!
}
synchronized(lock){
long elapsedTimeWithoutData = (System.currentTimeMillis() - lastTimeConsumedData);
if (elapsedTimeWithoutData > maxTimeWithoutAnyData) {
log(elapsedTimeWithoutData / 1000 + " secs elapsed without any data reception. Notifying command producer.", Project.MSG_VERBOSE);
lock.notify(); // command producer is waiting for this
try {
lock.wait(500); // wait til we have new commands.
Thread.yield();
log("Continuing consumer loop. commandProducerThread.isAlive()?" + commandProducerThread.isAlive(), Project.MSG_VERBOSE);
if(!commandProducerThread.isAlive()){
log("No more commands to be issued and it's been too long without data reception. Exiting consumer.", Project.MSG_VERBOSE);
break;
}
} catch (InterruptedException e) {
log(e, Project.MSG_VERBOSE);
break;
}
lastTimeConsumedData = System.currentTimeMillis(); // resets watch
}
}
}
} catch (IOException e) {
throw new BuildException(e);
}
}
};
thread.start();
commandProducerThread.start();
thread.join(maxwait);
if (thread.isAlive()) {
// ran out of time
thread = null;
if (getFailonerror()) {
throw new BuildException(TIMEOUT_MESSAGE);
} else {
log(TIMEOUT_MESSAGE, Project.MSG_ERR);
}
} else {
//success
if (outputFile != null) {
writeToFile(out.toString(), append, outputFile);
}
// this is the wrong test if the remote OS is OpenVMS,
// but there doesn't seem to be a way to detect it.
log("Exit status (not reliable): " + channel.getExitStatus(), Project.MSG_INFO);
// int ec = channel.getExitStatus(); FIXME
// if (ec != 0) {
// String msg = "Remote command failed with exit status " + ec;
// if (getFailonerror()) {
// throw new BuildException(msg);
// } else {
// log(msg, Project.MSG_ERR);
// }
// }
}
} catch (Exception e){
throw new BuildException(e);
} finally {
if (session != null && session.isConnected()) {
session.disconnect();
}
}
}
private void commandMode() {
Session session = null;
try {
session = openSession();
/* called once */
if (command != null) {
log("cmd : " + command, Project.MSG_INFO);
ByteArrayOutputStream out = executeCommand(session, command);
if (outputProperty != null) {
//#bugzilla 43437
getProject().setNewProperty(outputProperty, command + " : " + out);
}
} else { // read command resource and execute for each command
try {
BufferedReader br = new BufferedReader(
new InputStreamReader(commandResource.getInputStream()));
String cmd;
String output = "";
while ((cmd = br.readLine()) != null) {
log("cmd : " + cmd, Project.MSG_INFO);
ByteArrayOutputStream out = executeCommand(session, cmd);
output += cmd + " : " + out + "\n";
}
if (outputProperty != null) {
//#bugzilla 43437
getProject().setNewProperty(outputProperty, output);
}
FileUtils.close(br);
} catch (IOException e) {
throw new BuildException(e);
}
}
} catch (JSchException e) {
throw new BuildException(e);
} finally {
if (session != null && session.isConnected()) {
session.disconnect();
}
}
}
private ByteArrayOutputStream executeCommand(Session session, String cmd)
throws BuildException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
TeeOutputStream tee = new TeeOutputStream(out, new KeepAliveOutputStream(System.out));
try {
final ChannelExec channel;
session.setTimeout((int) maxwait);
/* execute the command */
channel = (ChannelExec) session.openChannel("exec");
channel.setCommand(cmd);
channel.setOutputStream(tee);
channel.setExtOutputStream(tee);
channel.connect();
// wait for it to finish
thread =
new Thread() {
public void run() {
while (!channel.isClosed()) {
if (thread == null) {
return;
}
try {
sleep(RETRY_INTERVAL);
} catch (Exception e) {
// ignored
}
}
}
};
thread.start();
thread.join(maxwait);
if (thread.isAlive()) {
// ran out of time
thread = null;
if (getFailonerror()) {
throw new BuildException(TIMEOUT_MESSAGE);
} else {
log(TIMEOUT_MESSAGE, Project.MSG_ERR);
}
} else {
//success
if (outputFile != null) {
writeToFile(out.toString(), append, outputFile);
}
// this is the wrong test if the remote OS is OpenVMS,
// but there doesn't seem to be a way to detect it.
int ec = channel.getExitStatus();
if (ec != 0) {
String msg = "Remote command failed with exit status " + ec;
if (getFailonerror()) {
throw new BuildException(msg);
} else {
log(msg, Project.MSG_ERR);
}
}
}
} catch (BuildException e) {
throw e;
} catch (JSchException e) {
if (e.getMessage().indexOf("session is down") >= 0) {
if (getFailonerror()) {
throw new BuildException(TIMEOUT_MESSAGE, e);
} else {
log(TIMEOUT_MESSAGE, Project.MSG_ERR);
}
} else {
if (getFailonerror()) {
throw new BuildException(e);
} else {
log("Caught exception: " + e.getMessage(),
Project.MSG_ERR);
}
}
} catch (Exception e) {
if (getFailonerror()) {
throw new BuildException(e);
} else {
log("Caught exception: " + e.getMessage(), Project.MSG_ERR);
}
}
return out;
}
/**
* Writes a string to a file. If destination file exists, it may be
* overwritten depending on the "append" value.
*
* @param from string to write
* @param to file to write to
* @param append if true, append to existing file, else overwrite
* @exception Exception most likely an IOException
*/
private void writeToFile(String from, boolean append, File to)
throws IOException {
FileWriter out = null;
try {
out = new FileWriter(to.getAbsolutePath(), append);
StringReader in = new StringReader(from);
char[] buffer = new char[BUFFER_SIZE];
int bytesRead;
while (true) {
bytesRead = in.read(buffer);
if (bytesRead == -1) {
break;
}
out.write(buffer, 0, bytesRead);
}
out.flush();
} finally {
if (out != null) {
out.close();
}
}
}
}
Another simple workaround is to source the user's .bash_profile before running your commands:
<sshexec host="somehost"
username="${username}"
password="${password}"
command="source ~/.bash_profile && set"/>
Great post, chubbsondubs. I needed to set the Oracle SID and then execute a PL/SQL script that does not have a proper exit, hence the "echo exit" piped in.
<sshexec host="${db.ipaddr}"
verbose="true"
trust="true"
username="${scp.oracle.userid}"
password="${scp.oracle.password}"
command="echo exit | /bin/bash -l -c 'export ORACLE_SID=${db.name} ; sqlplus ${db.dbo.userid}/${db.dbo.password} #./INSTALL_REVPORT/CreateDatabase/gengrant.sql'"
/>