Custom Interceptor not working with Apache Flume - flume

I have a Flume component listening to a Syslog stream.
I made a custom interceptor to modify the incoming events, but it is not working. What did I do wrong?
Thank you,
Andrea
The interceptor is a correctly compiled JAR file and sits in the $FLUME_HOME/bin directory.
Interceptor class:
package com.test.flume;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.conf.Configurable;
import org.apache.flume.interceptor.Interceptor;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;
public class SQLFlumeInterceptor implements Interceptor {
private String headerKey;
private SQLFlumeInterceptor(Context ctx) {
}
@Override
public void initialize() {
}
@Override
public Event intercept(Event event) {
addPreposition(event);
return event;
}
private void addPreposition(Event event) {
System.out.println("Event processed");
event.setBody("Modified Event".getBytes());
}
@Override
public List<Event> intercept(List<Event> events) {
for (Iterator<Event> iterator = events.iterator(); iterator.hasNext(); ) {
Event next = iterator.next();
intercept(next);
if (next == null) {
iterator.remove();
}
}
return events;
}
@Override
public void close() {
}
public static class CounterInterceptorBuilder implements Interceptor.Builder {
private Context ctx;
@Override
public Interceptor build() {
return new SQLFlumeInterceptor(ctx);
}
@Override
public void configure(Context context) {
this.ctx = context;
}
}
}
The flume.config file:
# Name the components on this agent
a1.sources = r1
a1.sinks = file-sink
a1.channels = c1
# Describe/configure the source
a1.sources.r1.type = syslogtcp
a1.sources.r1.port = 41414
a1.sources.r1.host = 192.168.1.2
a1.sources.r1.interceptors = i1
a1.sources.r1.interceptors.i1.type = com.test.flume.SQLFlumeInterceptor$CounterInterceptorBuilder
# Describe the FILE_ROLL sink
a1.sinks.file-sink.type = FILE_ROLL
a1.sinks.file-sink.sink.directory = /opt/apache-flume-1.5.2-bin/logs/pluto.log
a1.sinks.file-sink.sink.rollInterval = 0
a1.sinks.file-sink.batchSize = 100
a1.sinks.file-sink.fileHeader = true
# Use a channel which buffers events in memory
a1.channels.c1.type = memory
a1.channels.c1.capacity = 1000
a1.channels.c1.transactionCapacity = 100
# Bind the source and sink to the channel
a1.sources.r1.channels = c1
a1.sinks.file-sink.channel = c1
The system logs the events to a file without modifying them; this is the pertinent DEBUG log:
2015-04-27 21:39:17,625 (conf-file-poller-0) [DEBUG - org.apache.flume.conf.FlumeConfiguration$AgentConfiguration.isValid(FlumeConfiguration.java:313)] Starting validation of configuration for agent: a1, initial-configuration: AgentConfiguration[a1]
SOURCES: {r1={ parameters:{port=41414, host=192.168.1.2, interceptors=i1, interceptors.i1.type=com.test.flume.SQLFlumeInterceptor$CounterInterceptorBuilder, channels=c1, type=syslogtcp} }}
CHANNELS: {c1={ parameters:{transactionCapacity=100, capacity=1000, type=memory} }}
SINKS: {file-sink={ parameters:{sink.rollInterval=0, type=FILE_ROLL, channel=c1, sink.directory=/opt/apache-flume-1.5.2-bin/logs/pluto.log} }}

Please place your interceptor JAR file in the $FLUME_HOME/lib directory, not in $FLUME_HOME/bin.
Otherwise Flume will not load the JAR.
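As an alternative to dropping the JAR straight into $FLUME_HOME/lib, Flume (1.4 and later) also picks up third-party JARs from a plugins.d directory at startup; a minimal sketch of that layout, with hypothetical plugin and JAR names:
$FLUME_HOME/plugins.d/sql-interceptor/lib/sql-flume-interceptor.jar
Every JAR under plugins.d/&lt;plugin-name&gt;/lib is added to Flume's classpath by the flume-ng startup script.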

Related

Programmatically changing the level of the Appender reference of a Logger in log4j2

I tested the solution to a similar question,
How can I change the AppenderRef Level in log4j2 programmatically?
but that solution does not work with the latest version of log4j2 (2.17.0).
My use case is slightly different from the question referred to: I am configuring Log4j programmatically, through a ConfigurationFactory:
package test.config;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.core.LoggerContext;
import org.apache.logging.log4j.core.appender.ConsoleAppender;
import org.apache.logging.log4j.core.config.*;
import org.apache.logging.log4j.core.config.builder.api.*;
import org.apache.logging.log4j.core.config.builder.impl.BuiltConfiguration;
import org.apache.logging.log4j.core.config.plugins.Plugin;
import simplica.util.CommonUtils;
import java.io.File;
import java.net.URI;
/**
* Configuration Factory to configure the Log4J2 Configuration programmatically
*
* @version 1.0
* @author Vishwanath Washimkar
*
*/
@Plugin(
name = "CustomConfigurationFactory",
category = ConfigurationFactory.CATEGORY)
@Order(50)
public class CustomConfigurationFactory extends ConfigurationFactory {
private LayoutComponentBuilder layoutBuilder;
private String logDirPath;
private EmailAppender emailAppender;
private String debugLoglevel;
private AppConfig appConfig;
private static ConfigurationBuilder<BuiltConfiguration> builder;
public static LoggerComponentBuilder labwareLogger;
/**
* Related to FileAppenders
*/
private final static String APPN_LAYOUT_PATTERN = "%d{MM-dd#HH:mm:ss}%-4r %-5p [%t] %37c %3x - %m%n";
public CustomConfigurationFactory(){
builder = newConfigurationBuilder();
appConfig = AppConfig.getInstance();
if(appConfig != null){
logDirPath = appConfig.getWeblimsLogPath();
emailAppender = appConfig.getEmailAppender();
}
}
@Override
protected String[] getSupportedTypes() {
return new String[] { "*" };
}
@Override
public Configuration getConfiguration(LoggerContext loggerContext, ConfigurationSource source) {
return null;
}
@Override
public Configuration getConfiguration(final LoggerContext loggerContext, final String name, final URI configLocation) {
return createConfiguration();
}
public Configuration createConfiguration() {
builder.setStatusLevel(Level.WARN);
builder.setConfigurationName("Test");
builder.setMonitorInterval("5");
layoutBuilder = builder.newLayout("PatternLayout").addAttribute("pattern", APPN_LAYOUT_PATTERN);
RootLoggerComponentBuilder rootLogger = builder.newRootLogger(Level.DEBUG);
if (CommonUtils.isValue(logDirPath)) {
File file = new File(logDirPath);
if (file.isDirectory() && file.canWrite()) {
configureWithFileAppender(builder, rootLogger);
} else {
configureWithConsoleAppender(builder, rootLogger);
}
} else {
//this is the last recourse, as every other configuration has failed
configureWithConsoleAppender(builder,rootLogger);
}
if(emailAppender != null){
if(!emailAppender.getEnabled()){
//add and enable email appender
configureEmailAppender(builder,rootLogger);
}
}
builder.add(rootLogger);
return builder.build();
}
private void configureEmailAppender(ConfigurationBuilder<BuiltConfiguration> builder, RootLoggerComponentBuilder rootLogger) {
String emailAppenderName = "emailAppender";
AppenderComponentBuilder smtpBuilder = builder.newAppender("emailAppender", "SMTP")//
.addAttribute("smtpUsername", emailAppender.getSMTPUsername())
.addAttribute("smtpPassword", emailAppender.getSMTPPassword())
.addAttribute("smtpProtocol", emailAppender.getSmtpProtocol())
.addAttribute("smtpHost", emailAppender.getSMTPHost())
.addAttribute("to", emailAppender.getTo())
.addAttribute("subject", emailAppender.getSubject())
.addAttribute("Cc", emailAppender.getCc())
.addAttribute("Bcc", emailAppender.getBcc())
.add(layoutBuilder);
builder.add(smtpBuilder);
if(emailAppender.getEnabled()){
rootLogger.add(builder.newAppenderRef(emailAppenderName));
}
}
private void configureWithConsoleAppender(ConfigurationBuilder<BuiltConfiguration> builder, RootLoggerComponentBuilder rootLogger) {
if(builder == null) throw new IllegalArgumentException("builder cannot be null");
// create a console appender
AppenderComponentBuilder appenderBuilder = builder.newAppender("console", "CONSOLE").addAttribute("target", ConsoleAppender.Target.SYSTEM_OUT);
appenderBuilder.add(layoutBuilder);
builder.add(appenderBuilder);
rootLogger.add(builder.newAppenderRef("console"));
}
private void configureWithFileAppender(ConfigurationBuilder<BuiltConfiguration> builder, RootLoggerComponentBuilder rootLogger) {
if(builder == null) throw new IllegalArgumentException("builder cannot be null");
ComponentBuilder triggeringPolicy = builder.newComponent("Policies")
.addComponent(builder.newComponent("TimeBasedTriggeringPolicy").addAttribute("interval", "1"));
AppenderComponentBuilder debugLogBuilder = builder.newAppender("debugLog", "RollingFile")
.addAttribute("fileName", logDirPath + "\\" + "debug.log")
.addAttribute("filePattern", logDirPath + "\\" + "debug%d{MM-dd-yy}.log.gz")
.add(layoutBuilder)
.addComponent(triggeringPolicy);
AppenderComponentBuilder errorLogBuilder = builder.newAppender("errorLog", "RollingFile")
.addAttribute("fileName", logDirPath + "\\" + "error.log")
.addAttribute("filePattern", logDirPath + "\\" + "error-%d{MM-dd-yy}.log.gz")
.add(layoutBuilder)
.addComponent(triggeringPolicy);
builder.add(debugLogBuilder);
builder.add(errorLogBuilder);
labwareLogger = builder.newLogger("test", Level.DEBUG );
labwareLogger.add(builder.newAppenderRef("debugLog").addAttribute("level", getDebugLogLevel()));
labwareLogger.add(builder.newAppenderRef("errorLog").addAttribute("level", Level.ERROR));
builder.add(labwareLogger);
rootLogger.add(builder.newAppenderRef("debugLog").addAttribute("level", Level.WARN));
rootLogger.add(builder.newAppenderRef("errorLog").addAttribute("level", Level.ERROR));
}
private Level getDebugLogLevel() {
String logLevel = appConfig.getDebugLogLevel();
if("DEBUG".equalsIgnoreCase(logLevel)){
return Level.DEBUG;
}
if("INFO".equalsIgnoreCase(logLevel)){
return Level.INFO;
}
if("ERROR".equalsIgnoreCase(logLevel)){
return Level.ERROR;
}
if("WARN".equalsIgnoreCase(logLevel)){
return Level.WARN;
}
if("FATAL".equalsIgnoreCase(logLevel)){
return Level.FATAL;
}
if("TRACE".equalsIgnoreCase(logLevel)){
return Level.TRACE;
}
if("ALL".equalsIgnoreCase(logLevel)){
return Level.ALL;
}
//if the configured level did not match any known name, fall back to WARN
return Level.WARN;
}
}
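As a side note, the string-to-Level mapping in getDebugLogLevel can be delegated to Log4j itself; a minimal sketch, assuming the same AppConfig accessor:
private Level getDebugLogLevel() {
// Level.toLevel parses the name case-insensitively and returns the supplied
// default when the name is null or not recognized.
return Level.toLevel(appConfig.getDebugLogLevel(), Level.WARN);
}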
Then I am trying to change the logging level for the AppenderRef in a servlet:
package com.example.testwebapp;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.core.LoggerContext;
import org.apache.logging.log4j.core.config.Configuration;
import org.apache.logging.log4j.core.config.LoggerConfig;
import java.io.*;
import javax.servlet.http.*;
import javax.servlet.annotation.*;
@WebServlet(name = "helloServlet", value = "/hello-servlet")
public class HelloServlet extends HttpServlet {
private String message;
public void init() {
message = "Hello World! from Vish";
}
int i =0;
public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException {
if(i > 2) {
LoggerContext context = (LoggerContext) LogManager.getContext(false);
Configuration config = context.getConfiguration();
LoggerConfig labware = config.getLoggerConfig("labware");
labware.removeAppender("debugLog");
labware.addAppender(config.getAppender("debugLog"), Level.DEBUG, null);
// This causes all Loggers to refetch information from their LoggerConfig.
context.updateLoggers();
}
i++;
response.setContentType("text/html");
getLog().debug("DEBUG log entryc 11111 ");
getLog().info("INFO log entry ");
getLog().error("ERROR log entry ");
getLog().warn("############# WAR log entry ");
// Hello
PrintWriter out = response.getWriter();
out.println("<html><body>");
out.println("<h1>" + message + " World </h1>");
out.println("</body></html>");
}
public void destroy() {
}
/**
* @return The logger for the class.
*/
private static Log getLog() {
return LogFactory.getLog(HelloServlet.class);
}
}
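For comparison, here is a minimal, self-contained variant of the servlet's remove-and-re-add logic, with the logger name aligned to the one the factory registers ("test" in the builder.newLogger call above) and a guard around the appender lookup; org.apache.logging.log4j.core.Appender is the only extra import it needs:
LoggerContext context = (LoggerContext) LogManager.getContext(false);
Configuration config = context.getConfiguration();
// getLoggerConfig falls back to the closest configured parent (ultimately the root)
// when the exact name is not configured, so the name must match the configured logger.
LoggerConfig loggerConfig = config.getLoggerConfig("test");
Appender debugAppender = config.getAppender("debugLog");
if (debugAppender != null) {
loggerConfig.removeAppender("debugLog");
loggerConfig.addAppender(debugAppender, Level.DEBUG, null);
// Make all Loggers refetch information from their LoggerConfig.
context.updateLoggers();
}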

QAF: Implementation of Self healing(Healenium) in QAF

I am new to QAF and need to implement self-healing in our test methods using Healenium. I have implemented it without QAF and it works fine; please refer to the code below.
import com.epam.healenium.SelfHealingDriver;
import io.github.bonigarcia.wdm.WebDriverManager;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.openqa.selenium.Dimension;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.chrome.ChromeDriver;
import org.openqa.selenium.chrome.ChromeOptions;
import java.util.concurrent.TimeUnit;
public class BaseTest {
static protected SelfHealingDriver driver;
@BeforeAll
static public void setUp() {
WebDriverManager.chromedriver().setup();
ChromeOptions options = new ChromeOptions();
options.setHeadless(false);
//declare delegate
WebDriver delegate = new ChromeDriver(options);
driver = SelfHealingDriver.create(delegate);
driver.manage().timeouts().implicitlyWait(4, TimeUnit.SECONDS);
driver.manage().window().setSize(new Dimension(1200, 800));
}
@AfterAll
static public void afterAll() {
if (driver != null) {
driver.quit();
}
}
}
I just want to wrap the QAF web driver with this self-healing driver, as above.
QAF discourages using a static class variable for the driver; the code provided in the question will not work for parallel execution. Driver management is taken care of by QAF, with thread safety and with behavior that can be changed using the property selenium.singletone.
You can try the following approach when you want a SelfHealingDriver:
public class SampleTest extends WebDriverTestCase {
@Test
public void yourTestCase(){
SelfHealingDriver driver = SelfHealingDriver.create(getDriver());
//your code goes below
}
}
SelfHealingDriver proxies the actual driver. You can achieve the self-healing functionality without a driver proxy by using a listener for findElement/findChildElement; a driver listener works without proxying the driver. For example:
public class WDListener extends QAFWebDriverCommandAdapter {
private static final Map<String, Object> byToString = JSONUtil.toMap(
"{'ByCssSelector':'css selector','ByClassName':'class name','ByXPath':'xpath','ByPartialLinkText':'partial link text','ById':'id','ByLinkText':'link text','ByName':'name'}");
//this method will be called when a new driver object is created
public void onInitialize(QAFExtendedWebDriver driver){
driver.manage().timeouts().implicitlyWait(4, TimeUnit.SECONDS);
driver.manage().window().setSize(new Dimension(1200, 800));
}
@Override
public void afterCommand(QAFExtendedWebDriver driver, CommandTracker commandTracker) {
if (DriverCommand.FIND_ELEMENT.equalsIgnoreCase(commandTracker.getCommand())
|| DriverCommand.FIND_ELEMENTS.equalsIgnoreCase(commandTracker.getCommand())
|| DriverCommand.FIND_CHILD_ELEMENT.equalsIgnoreCase(commandTracker.getCommand())
|| DriverCommand.FIND_CHILD_ELEMENTS.equalsIgnoreCase(commandTracker.getCommand())) {
Map<String, Object> parameters = commandTracker.getParameters();
if (parameters != null && parameters.containsKey("using") && parameters.containsKey("value")) {
By by = LocatorUtil
.getBy(String.format("%s=%s", parameters.get("using"), parameters.get("value")));
HealingServiceImpl healingServiceImpl = new HealingServiceImpl(new SelfHealingEngine(driver));
StackTraceElement[] stackTrace = Thread.currentThread().getStackTrace();
Object result = commandTracker.getResponce().getValue();
List<WebElement> webElements = List.class.isAssignableFrom(result.getClass()) ? (List<WebElement>) result : Collections.singletonList((WebElement) result);
healingServiceImpl.savePath(new PageAwareBy(driver.getTitle(),by),webElements);
}
}
}
@Override
public void onFailure(QAFExtendedWebDriver driver, CommandTracker commandTracker) {
// StackTraceElement[] stackTrace =
// commandTracker.getException().getStackTrace();
Map<String, Object> parameters = commandTracker.getParameters();
if (parameters != null && parameters.containsKey("using") && parameters.containsKey("value")) {
By by = LocatorUtil
.getBy(String.format("%s=%s", parameters.get("using"), parameters.get("value")));
StackTraceElement[] stackTrace = Thread.currentThread().getStackTrace();
HealingServiceImpl healingServiceImpl = new HealingServiceImpl(new SelfHealingEngine(driver));
Optional<By> healedBy = healingServiceImpl.healLocators(new PageAwareBy(driver.getTitle(),by), null, stackTrace) ;
if(healedBy.isPresent()) {
commandTracker.getParameters().putAll(toParams(healedBy.get()));
commandTracker.setRetry(true);
}
}
}
}
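To have QAF actually invoke such a listener, it must be registered as a driver command listener in the project properties; a minimal sketch, assuming QAF's wd.command.listeners key and a hypothetical package name (verify both against your QAF version):
wd.command.listeners=com.example.listeners.WDListener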

Publish FluxSink of different object types

I have an rsocket endpoint that responds with a flux:
@MessageMapping("responses")
Flux<?> deal(@Payload String message) {
return myService.generateResponses(message);
}
The responses can be any of 3 different types of objects produced asynchronously using the following code (if it worked):
public Flux<?> generateResponses(String request) {
// Setup response sinks
final FluxProcessor publish = EmitterProcessor.create().serialize();
final FluxSink<Response1> sink1 = publish.sink();
final FluxSink<Response2> sink2 = publish.sink();
final FluxSink<Response3> sink3 = publish.sink();
// Get async responses: starts new thread to gather responses and update sinks
new MyResponses(request, sink1, sink2, sink3);
// Return the Flux
Flux<?> output = Flux
.from(publish
.log());
return output;
}
The problem is that when I populate the sinks with different objects only the first sink is actually publishing back to the subscriber.
public class MyResponses extends CacheListenerAdapter {
private FluxSink<Response1> sink1;
private FluxSink<Response2> sink2;
private FluxSink<Response3> sink3;
// Constructor is omitted for brevity
@Override
public void afterCreate(EntryEvent event) {
if (event.getNewValue() instanceof Response1) {
Response1 r1 = (Response1)event.getNewValue();
sink1.next(r1);
}
if (event.getNewValue() instanceof Response2) {
Response2 r2 = (Response2)event.getNewValue();
sink2.next(r2);
}
if (event.getNewValue() instanceof Response3) {
Response3 r3 = (Response3)event.getNewValue();
sink3.next(r3);
}
}
}
If I make the sinks of type <?> then there's a .next error:
The method next(capture#2-of ?) in the type FluxSink<capture#2-of ?> is not applicable for the arguments (Response1)
Is there a better approach to this requirement?
The reason this did not work with different object types had to do with Spring Boot Data Geode serialization of the underlying object types. The way to get the object Flux to work was to use one sink of type <Object>:
public Flux<Object> generateResponses(String request) {
// Setup the Flux
EmitterProcessor<Object> emitter = EmitterProcessor.create();
FluxSink<Object> sink = emitter.sink(FluxSink.OverflowStrategy.LATEST);
// Get async responses: starts new thread to gather responses and update sinks
new MyResponses(request, sink);
// Setup an output Flux to publish the input Flux
Flux<Object> out = Flux
.from(emitter
.log(log.getName()));
return out;
}
The event handler then uses the single sink:
public class MyResponses extends CacheListenerAdapter {
private FluxSink<Object> sink;
// Constructor is omitted for brevity
@Override
public void afterCreate(EntryEvent event) {
if (event.getNewValue() instanceof Response1) {
Response1 r1 = (Response1)event.getNewValue();
sink.next(r1);
}
if (event.getNewValue() instanceof Response2) {
Response2 r2 = (Response2)event.getNewValue();
sink.next(r2);
}
if (event.getNewValue() instanceof Response3) {
Response3 r3 = (Response3)event.getNewValue();
sink.next(r3);
}
}
}
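As a side note, EmitterProcessor is deprecated in Reactor 3.4+; a rough sketch of the same single-sink idea using the Sinks API (reactor.core.publisher.Sinks), assuming MyResponses is adapted to call tryEmitNext instead of next:
public Flux<Object> generateResponses(String request) {
// One thread-safe sink shared with the cache listener; subscribers see a multicast Flux
Sinks.Many<Object> sink = Sinks.many().multicast().onBackpressureBuffer();
new MyResponses(request, sink);
return sink.asFlux().log();
}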

Flume Custom Interceptor is not Working

I want to replace the IP address in the data with its masked form. This is done in the "backup" part of my Flume agent (see below).
In this configuration there are 2 channels: the first channel dumps data to HBase, while the second one is used for backup:
a1.sources = r1 r2
a1.channels = channel1 Backup_channel
a1.sinks = FSink
a1.sources.r1.handler = com.flume.handler.JSONHandler
a1.sources.r1.type = avro
a1.sources.r1.bind = x.x.x.x
a1.sources.r1.port = 10008
a1.sources.r2.handler = com.flume.handler.JSONHandler
a1.sources.r2.type = avro
a1.sources.r2.bind = x.x.x.x
a1.sources.r2.port = 10009
a1.sources.r2.interceptors = i1
a1.sources.r2.interceptors.i1.type = com.flume.interceptor.DcInterceptor
a1.channels.channel1.type = file
a1.channels.channel1.checkpointDir = /root/flume/channels/Livechannel/checkpoint
a1.channels.channel1.dataDirs = /root/flume/channels/Livechannel/data
a1.sinks.FSink.type = hbase
a1.sinks.FSink.table = Temp_Test
a1.sinks.FSink.batchSize = 300
a1.sinks.FSink.columnFamily = T
a1.sinks.FSink.serializer = com.flume.sink.TestTP
a1.sources.r1.channels = channel1
a1.sources.r2.channels = Backup_channel
a1.channels.Backup_channel.type = file
a1.channels.Backup_channel.checkpointDir = /data/disk/flume/backup/checkpoint
a1.channels.Backup_channel.dataDirs = /data/disk/flume/backup/data
a1.sinks.FSink.channel = channel1
Following is my custom Java interceptor code. It implements the intercept method, which gets an IP address from the body, computes its masked form, and then writes it back to the body. But somehow it's not working:
public class DcInterceptor implements Interceptor {
private byte[] jsonTestBeans;
private final Type listType = new TypeToken<List<TestBeans>>() {}.getType();
@Override
public void close() {
// TODO Auto-generated method stub
}
@Override
public void initialize() {
// TODO Auto-generated method stub
new Logger();
}
@Override
public Event intercept(Event event) {
// TODO Auto-generated method stub
List<Row> actions = new ArrayList<Row>();
this.jsonTestBeans = event.getBody();
Logger.logger.debug("In Interceptor");
System.out.println("In Interceptor");
Gson _Gson = new Gson();
String jsonstr = "";
try {
jsonstr = new String(jsonTestBeans, "UTF-8");
} catch (Exception e) {
// TODO: handle exception
Logger.logger.error(e.getMessage() + "In Interceptor");
jsonstr = new String(jsonTestBeans);
}
List<TestBeans> TestBeanss = _Gson.fromJson(jsonstr, listType);
System.out.println("Json String :" + jsonstr);
List<String> gTouch = new ArrayList<String>();
for (TestBeans TestBeans : TestBeanss) {
String str = TestBeans.getIp();
Logger.logger.debug("IP : " + str);
String st = (str.substring(0, str.lastIndexOf(".") + 1) + "x");
Logger.logger.debug("Mask IP : " + st);
TestBeans.setRemoteIp(st);
}
event.setBody(_Gson.toJson(TestBeanss).getBytes());
Logger.logger.debug("Interceptor Ends");
return event;
}
@Override
public List<Event> intercept(List<Event> events) {
// TODO Auto-generated method stub
System.out.println("In List Interceptor");
Logger.logger.debug("In List Interceptor");
for (Event event : events) {
intercept(event);
}
return events;
}
public static class CounterInterceptorBuilder implements Interceptor.Builder {
private Context ctx;
@Override
public Interceptor build() {
Logger.logger.debug("In Interceptor Build");
System.out.println("In Build Interceptor");
return new DcInterceptor();
}
@Override
public void configure(Context context) {
this.ctx = context;
}
}
}
At the very least, I can see the following:
The configuration lines regarding your interceptor refer to an agent called ECircleTp_Test, while the rest of the configuration refer to a1.
You have configured com.flume.interceptor.DcInterceptor2, but the interceptor class you have developed is called DcInterceptor (without the final 2).
You have configured com.flume.interceptor.DcInterceptor2 as the fully qualified class name of your custom interceptor. Nevertheless, the code of the interceptor does not declare any package for the DcInterceptor(2) class.
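Separately, note that Flume expects the interceptor type property to name the Interceptor.Builder implementation rather than the interceptor class itself; with the code shown above, that wiring would look something like this (package name as declared in your project):
a1.sources.r2.interceptors = i1
a1.sources.r2.interceptors.i1.type = com.flume.interceptor.DcInterceptor$CounterInterceptorBuilder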

BlackBerry - Get current Process ID

I read Blackberry - How to get the background application process id, but I'm not sure I understand it correctly. The following code gets the foreground process id:
ApplicationManager.getApplicationManager().getForegroundProcessId()
I have two processes which execute the same piece of code to make a connection, I want to log the process which made the calls along with all my usual logging data to get a better idea of how the flow is working.
Is it possible to get the id for the process which is currently running the code? One process is in the foreground (UI process) and the other is in the background but both use the same connection library shared via the runtime store.
Thanks in advance!
Gav
So you have three modules: application, library, and service.
You need to get the application descriptor by module name and then get the process id.
UPDATE1
String moduleName = "application";
int handle = CodeModuleManager.getModuleHandle(moduleName);
ApplicationDescriptor[] descriptors = CodeModuleManager
.getApplicationDescriptors(handle);
if (descriptors.length > 0 && descriptors[0] != null) {
ApplicationManager.getApplicationManager().getProcessId(descriptors[0]);
}
Then, to log which module is using the library, use
Application.getApplication().getProcessId();
inside the library methods. I think it's better to implement the logging inside the library.
Once you have the process id of the calling application from the library code, you can compare it with the ids found by module name, and then you will know which module is using the library code.
UPDATE2
(Screenshot: device Event Log showing the library's log entries.)
library module code:
package library;
import net.rim.device.api.system.Application;
import net.rim.device.api.system.ApplicationDescriptor;
import net.rim.device.api.system.ApplicationManager;
import net.rim.device.api.system.CodeModuleManager;
import net.rim.device.api.system.EventLogger;
public class Logger {
// "AppLibSrvc" converted to long
long guid = 0xd4b6b5eeea339daL;
public Logger() {
EventLogger.register(guid, "AppLibSrvc", EventLogger.VIEWER_STRING);
}
public void log(String message) {
EventLogger.logEvent(guid, message.getBytes());
}
public void call() {
log("Library is used by " + getModuleName());
}
private String getModuleName() {
String moduleName = "";
String appModuleName = "application";
int appProcessId = getProcessIdByName(appModuleName);
String srvcModuleName = "service";
int srvcProcessId = getProcessIdByName(srvcModuleName);
int processId = Application.getApplication().getProcessId();
if (appProcessId == processId)
moduleName = appModuleName;
else if (srvcProcessId == processId)
moduleName = srvcModuleName;
return moduleName;
}
protected int getProcessIdByName(String moduleName) {
int processId = -1;
int handle = CodeModuleManager.getModuleHandle(moduleName);
ApplicationDescriptor[] descriptors = CodeModuleManager
.getApplicationDescriptors(handle);
if (descriptors.length > 0 && descriptors[0] != null) {
processId = ApplicationManager.getApplicationManager()
.getProcessId(descriptors[0]);
}
return processId;
}
}
application module code:
package application;
import java.util.Timer;
import java.util.TimerTask;
import library.Logger;
import net.rim.device.api.ui.UiApplication;
import net.rim.device.api.ui.container.MainScreen;
public class App extends UiApplication {
public App() {
pushScreen(new Scr());
}
public static void main(String[] args) {
App app = new App();
app.enterEventDispatcher();
}
}
class Scr extends MainScreen {
public Scr() {
Timer timer = new Timer();
TimerTask task = new TimerTask() {
public void run() {
Logger logger = new Logger();
logger.call();
}
};
timer.schedule(task, 3000, 3000);
}
}
service module code:
package service;
import java.util.Timer;
import java.util.TimerTask;
import library.Logger;
import net.rim.device.api.system.Application;
public class App extends Application {
public App() {
Timer timer = new Timer();
TimerTask task = new TimerTask() {
public void run() {
Logger logger = new Logger();
logger.call();
}
};
timer.schedule(task, 3000, 3000);
}
public static void main(String[] args) {
App app = new App();
app.enterEventDispatcher();
}
}
