Unit Test for Threaded Logging

Brian Ploetz sent me this great unit test for threaded logging. It exercises the ThrottlingFilter from several threads at once and tries to detect whether a deadlock occurs.

import java.lang.management.ManagementFactory;
import java.lang.management.ThreadInfo;
import java.lang.management.ThreadMXBean;

import junit.framework.TestCase;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.log4j.Appender;
import org.apache.log4j.Logger;

/**
 * Unit test for the ThrottlingFilter in a multi-threaded environment
 */
public class ThrottlingFilterThreadUTest extends TestCase {

  private static final Log logger = LogFactory.getLog(ThrottlingFilterThreadUTest.class);

  private static ThreadMXBean threadMXBean;

  @Override
  protected void setUp() throws Exception {
    super.setUp();
    threadMXBean = ManagementFactory.getThreadMXBean();
    logger.info("Thread contention monitoring supported: "
        + threadMXBean.isThreadContentionMonitoringSupported());
    logger.info("Thread contention monitoring enabled: "
        + threadMXBean.isThreadContentionMonitoringEnabled());
    threadMXBean.setThreadContentionMonitoringEnabled(true);
    logger.info("Thread contention monitoring enabled: "
        + threadMXBean.isThreadContentionMonitoringEnabled());
  }

  /**
   * Tests multiple threads using the same filter instance at the same time
   */
  public void testThreads() {
    Logger rootLogger = Logger.getRootLogger();
    assertNotNull(rootLogger);
    Appender fileAppender = rootLogger.getAppender("FILE");
    assertNotNull(fileAppender);
    ThrottlingFilter throttlingFilter = (ThrottlingFilter) fileAppender.getFilter();
    assertNotNull(throttlingFilter);

    ThreadGroup infoThreadGroup = new ThreadGroup("info-group");
    ThreadGroup errorThreadGroup = new ThreadGroup("error-group");
    Thread errorThread1 = new ErrorThread(errorThreadGroup, "error-thread-1");
    Thread infoThread1 = new InfoThread(infoThreadGroup, "info-thread-1");
    Thread errorThread2 = new ErrorThread(errorThreadGroup, "error-thread-2");
    Thread infoThread2 = new InfoThread(infoThreadGroup, "info-thread-2");
    infoThread1.start();
    errorThread1.start();
    errorThread2.start();
    infoThread2.start();

    // Poll until all four threads have finished, reporting any thread that is
    // blocked on a monitor lock along the way (a possible sign of deadlock).
    while (true) {
      ThreadInfo[] threadInfos = threadMXBean.getThreadInfo(threadMXBean.getAllThreadIds());
      for (int i = 0; i < threadInfos.length; i++) {
        ThreadInfo threadInfo = threadInfos[i];
        if (threadInfo != null && threadInfo.getThreadState() == Thread.State.BLOCKED) {
          System.out.println("Thread '" + threadInfo.getThreadName()
              + "' is blocked on the monitor lock '" + threadInfo.getLockName()
              + "' held by thread '" + threadInfo.getLockOwnerName() + "'");
        }
      }

      if (!infoThread1.isAlive() && !errorThread1.isAlive() && !infoThread2.isAlive()
          && !errorThread2.isAlive())
        break;
    }
  }

  public static class ErrorThread extends Thread {

    private static final Log logger = LogFactory.getLog(ErrorThread.class);

    public ErrorThread(ThreadGroup tg, String name) {
      super(tg, name);
    }

    public void run() {
      for (int i = 0; i < 10; i++) {
        try {
          test(0);
        } catch (Exception e) {
          long start = System.currentTimeMillis();
          logger.error("Error!", e);
          long end = System.currentTimeMillis();
          System.out.println("Took " + (end-start) + "ms to log error");
        }
      }
    }

    // simulate large stack traces
    private void test(int i) {
      if (i >= 500)
        throw new RuntimeException("D'OH!");
      test(i+1);
    }
  }

  public static class InfoThread extends Thread {

    private static final Log logger = LogFactory.getLog(InfoThread.class);

    public InfoThread(ThreadGroup tg, String name) {
      super(tg, name);
    }

    public void run() {
      for (int i = 0; i < 100; i++) {
        logger.info("Hi!");
      }
    }
  }
}

Here is the log4j.xml file used by the test.

<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE log4j:configuration SYSTEM "log4j.dtd">

<log4j:configuration xmlns:log4j="http://jakarta.apache.org/log4j/"
    debug="false">

    <!-- ================================= -->
    <!--           Appenders               -->
    <!-- ================================= -->

    <!-- A time/date based rolling file appender -->
    <appender name="FILE"
        class="org.apache.log4j.DailyRollingFileAppender">
        <param name="File" value="server.log" />
        <param name="Append" value="true" />

        <!-- Rollover at midnight each day -->
        <param name="DatePattern" value="'.'yyyy-MM-dd" />

        <layout class="org.apache.log4j.PatternLayout">
            <!-- The default pattern: Date Priority [Category] Message\n -->
            <param name="ConversionPattern" value="%d %-5p [%c] %m%n" />
        </layout>

        <filter class="com.betweengo.log4j.ThrottlingFilter">
          <param name="maxCountSameMessage" value="100"/>
          <param name="maxCountSavedMessages" value="100"/>
          <param name="waitInterval" value="60"/>
        </filter>
    </appender>

    <!-- ======================= -->
    <!-- Setup the Root category -->
    <!-- ======================= -->

    <root>
        <level value="INFO" />
        <appender-ref ref="FILE" />
    </root>

</log4j:configuration>
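
Brian's ThrottlingFilter class is not listed in this post. For reference, a custom log4j 1.x filter is just a subclass of org.apache.log4j.spi.Filter with bean-style setters matching the param entries above; the sketch below shows only that general shape, with the actual throttling logic omitted (it is not Brian's implementation).

import org.apache.log4j.spi.Filter;
import org.apache.log4j.spi.LoggingEvent;

public class ThrottlingFilter extends Filter {

  // log4j populates these fields from the <param> elements in log4j.xml
  // via bean-style setters.
  private int maxCountSameMessage;
  private int maxCountSavedMessages;
  private int waitInterval;

  public void setMaxCountSameMessage(int maxCountSameMessage) {
    this.maxCountSameMessage = maxCountSameMessage;
  }

  public void setMaxCountSavedMessages(int maxCountSavedMessages) {
    this.maxCountSavedMessages = maxCountSavedMessages;
  }

  public void setWaitInterval(int waitInterval) {
    this.waitInterval = waitInterval;
  }

  @Override
  public int decide(LoggingEvent event) {
    // The real filter would track recent messages and return DENY once a
    // message repeats too often; this sketch simply passes everything through.
    return Filter.NEUTRAL;
  }
}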

mod_rewrite to bypass security

Many Apache web server installations use a uriworkermap file to configure which requests are forwarded to Tomcat/JBoss and which are not. This provides a certain level of security. For example:

## APACHE RESOURCES (static files):
!/*.gif=myapp
!/*.html=myapp

## DISALLOW  (security-related filter):
!/*.jsp=myapp
!/*.xml=myapp

## TOMCAT RESOURCES:
/*.do=myapp

However, if you dynamically generate your sitemap.xml or any other XML file using a servlet, this security becomes a problem, since the XML request will never reach Tomcat/JBoss. This is where mod_rewrite comes to the rescue.

You can set up mod_rewrite to rewrite the sitemap.xml request to be a sitemap.do request.

RewriteRule ^/sitemap\.xml$ /sitemap.do [PT,L]

Then you can set up Struts to forward this request to sitemap.xml.

<action path="/sitemap" forward="/sitemap.xml"/>
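
For completeness, the dynamically generated sitemap could come from something as simple as a servlet that writes the XML itself. This is only a hypothetical sketch (the class name, URL and output are made up, and it is not part of the original setup):

import java.io.IOException;
import java.io.PrintWriter;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

public class SitemapServlet extends HttpServlet {

  @Override
  protected void doGet(HttpServletRequest request, HttpServletResponse response)
      throws ServletException, IOException {
    // Write the sitemap as XML; a real implementation would build the URL
    // list from the application's data.
    response.setContentType("text/xml");
    PrintWriter out = response.getWriter();
    out.println("<?xml version=\"1.0\" encoding=\"UTF-8\"?>");
    out.println("<urlset xmlns=\"http://www.sitemaps.org/schemas/sitemap/0.9\">");
    out.println("  <url><loc>http://www.example.com/</loc></url>");
    out.println("</urlset>");
  }
}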

Turning off JSP access

To turn off direct JSP access in JBoss or your other favorite application server, add this to your web.xml. The empty auth-constraint means no role is granted access, so direct requests for JSPs are rejected while forwards from within the application still work.

<!-- Restrict direct access to jsps -->
<security-constraint>
  <web-resource-collection>
    <web-resource-name>you_cant_touch_this</web-resource-name>
    <url-pattern>*.jsp</url-pattern>
  </web-resource-collection>
  <auth-constraint/>
</security-constraint>

To prevent Apache from forwarding JSP requests to JBoss, add the following to your uriworkermap configuration.

## DISALLOW FROM REACHING JBOSS (security-related filter):
!/*.jsp=name_of_your_app
!/*.xml=name_of_your_app

Maven Integration for Eclipse

Previously I was using an old Maven integration for Eclipse, version 0.0.11, which I got from http://m2eclipse.codehaus.org/update.  The plugin was horribly slow and seemed to sometimes interfere with my builds.

Today I upgraded to the latest, version 0.9.4, from http://m2eclipse.sonatype.org/update/ and things are moving much more smoothly.  You can learn more at Maven Integration for Eclipse.

In Windows > Preferences > Maven I only turned on Download Artifact Sources and Download Artifact JavaDoc. Download repository index updates on startup is on by default, but I turned it off after receiving this advice from a fellow developer, Bill Crook.

It’s unclear to me how the m2 plugin uses the indexes so I don’t think I can answer you. 🙂 Enabling it causes the plugin to rescan the entire repository if i remember correctly. I personally am not willing to have Eclipse do a full file scan of the local repository each time i start for unknown reasons. if you don’t mind the incurred overhead, go ahead and enable it!

The Maven plugin has a nice dependency feature which Bill Crook explains well.

There will come a time when you need to work on two projects simultaneously. Additionally, there will probably be a dependency from one project to another. The key to doing this effectively is understanding workspace resolution. Workspace resolution is a concept of the m2eclipse plugin. The way this works is that the plugin scans all projects in your workspace and analyzes the poms. Based on group and artifact ID’s the plugin will know if there are interdependencies between your projects. Once the plugin has successfully detected the dependency between projects, you can jump from one project source into another when jumping into methods (via control-click or F3).

There is one important caveat to this, versions. In addition to looking at group and artifact ID’s, the m2eclipse plugin will look at versions to determine if the project for a dependency is in the current workspace. Because of this you must make sure the versions match. Let’s take the example of projects A and B in your workspace. Assume A has a dependency on version 2.0.0.1 of B. Now, if the version of project B in your workspace is 3.0-SNAPSHOT, workspace resolution will NOT work. Can you figure out how to make this work? If you guessed, update the pom of project A to depend on 3.0-SNAPSHOT, you are correct. The moral of the story is be aware of versions when trying to get workspace resolution working.

You will know that workspace resolution is not functioning properly if you see duplicates of the same class when doing searches with control-shift-t. This behavior makes sense when you think about it as Eclipse sees two of these classes in your workspace. For example, continuing with projects A and B as above, let’s assume there is a class Foo in project B. Eclipse would find class Foo in project B source (as java source in your workspace) as well as in version 2.0.0.1 of B (as a class file in jar dependency).

Now if someone would improve the Perforce plugin…

Enabling Trace Level Debugging in JBoss

In JBoss 4.0.4.GA it took me a while to figure out how to enable trace-level debugging.

Typically you could do something like this to enable trace level debugging for a category of classes.

<category name="com.betweengo.app">
  <priority value="TRACE"/>
</category>

However, JBoss 4.0.4.GA ships with an older log4j implementation, so you need to use JBoss's custom TRACE level.

<category name="com.betweengo.app">
  <priority value="TRACE" class="org.jboss.logging.XLevel"/>
</category>

This is documented in the release notes for JBoss-4.2.1.GA.

Since the latest log4j includes a trace level, there is no need to reference the custom jboss TRACE level in conf/jboss-log4j.xml configs, JBAS-4163.

There is additional information on trace level debugging in the articles Enabling TRACE logging on server and Using Logging.
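
To emit trace messages from your own code against that older log4j, you can log through the generic log() method with JBoss's custom level. A minimal sketch (it assumes org.jboss.logging.XLevel from the JBoss common classes is on the classpath):

import org.apache.log4j.Logger;
import org.jboss.logging.XLevel;

public class TraceDemo {

  private static final Logger logger = Logger.getLogger(TraceDemo.class);

  public void doWork() {
    // The old log4j bundled with JBoss 4.0.4.GA has no Logger.trace(),
    // so log through the generic log() method with the custom TRACE level.
    if (logger.isEnabledFor(XLevel.TRACE)) {
      logger.log(XLevel.TRACE, "entering doWork()");
    }
  }
}

With the category configured for TRACE as shown above, these messages show up; at the default INFO level they are skipped.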

Maven Profiles

Maven profiles are a pretty neat concept for organizing different settings for different builds.

Typically you set them up in your settings.xml. Projects may also have profiles in profiles.xml, whose values you can override with your own values in settings.xml.

A few observations I have made about profiles.

  1. Your effective profile is the combination of all the active profiles in your settings.xml.
  2. To activate additional profiles during a single Maven execution, use mvn -P profile1,profile2.

Stats on Dreamhost with WordPress

To get stats on my WordPress blogs hosted by Dreamhost, I needed to add this to the .htaccess file.

# BEGIN Stats
<IfModule mod_rewrite.c>
RewriteEngine On
RewriteBase /
RewriteCond %{REQUEST_URI} ^/(stats|failed_auth\.html).*$ [NC]
RewriteRule . - [L]
</IfModule>
# END Stats

This is further explained in the Dreamhost Wiki page Making stats accessible with htaccess.

Dreamhost uses Analog to generate web statistics. Others have recommended AWStats, which seems more powerful. Maybe I'll switch if I feel the need.

Log4j levels

I always forget which level is which in Log4j so I copied this from the JavaDoc for the Level class.

ALL: The ALL has the lowest possible rank and is intended to turn on all logging.
DEBUG: The DEBUG Level designates fine-grained informational events that are most useful to debug an application.
ERROR: The ERROR level designates error events that might still allow the application to continue running.
FATAL: The FATAL level designates very severe error events that will presumably lead the application to abort.
INFO: The INFO level designates informational messages that highlight the progress of the application at coarse-grained level.
OFF: The OFF has the highest possible rank and is intended to turn off logging.
TRACE: The TRACE Level designates finer-grained informational events than the DEBUG level.
WARN: The WARN level designates potentially harmful situations.
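
A quick way to remember how the ranking works: a logger only emits events at or above its effective level. Here is a small sketch using the log4j 1.x API (the class name and messages are made up):

import org.apache.log4j.BasicConfigurator;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;

public class LevelDemo {

  public static void main(String[] args) {
    BasicConfigurator.configure();  // simple console appender
    Logger logger = Logger.getLogger(LevelDemo.class);

    logger.setLevel(Level.WARN);
    logger.debug("skipped: DEBUG ranks below WARN");
    logger.info("skipped: INFO ranks below WARN");
    logger.warn("printed");
    logger.fatal("printed: FATAL ranks above WARN");

    logger.setLevel(Level.OFF);     // highest rank, turns everything off
    logger.fatal("skipped");
  }
}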

Size of collection in a JSP/DSP page

(Photo: leaf pile by oeimah)

Sometimes in a JSP/DSP page you will want to get the size of a collection, but unless you are within a Range, ForEach, or similar droplet you won't have access to this value.

Struts has a nice solution using the bean:size tag.  JSTL 1.1 has a nice solution using the fn:length function.

Here is an example of how to use Struts, DSPEL and JSTL to get the size of a collection.

  <dspel:getvalueof param="book.pages" var="pages"
                    vartype="java.util.Collection"/>
  <bean:size id="numPages" name="pages"/>
  Number of Pages: <c:out value="${numPages}"/>
  Number of Pages: <dspel:valueof value="${numPages}"/>

Here is an example of how to use JSTL 1.1 and DSPEL to get the size of a collection.

  <dspel:getvalueof param="book.pages" var="pages"
                    vartype="java.util.Collection"/>
  Number of Pages: <c:out value="${fn:length(pages)}"/>

Maven Assembly: Convert Shell Scripts to Use UNIX LFs

In a previous post, Shell Scripts with Windows LF's Fail in Latest Cygwin, I wrote about how to use Perforce to check out all files in UNIX format. However, if you use Eclipse this solution will not work, because Eclipse inserts Windows LFs into any line you add to a file, leaving you with a mess of files containing both UNIX and Windows LFs.

However, if you happen to use Maven to generate or copy your shell scripts to their target directories, you can take advantage of the lineEnding property in the assembly descriptor. If you set lineEnding to "unix", the resulting shell scripts will be written in UNIX format. For example:

<file>
  <source>src/main/scripts/foo.sh</source>
  <outputDirectory>bin</outputDirectory>
  <lineEnding>unix</lineEnding>
</file>