NilClass error crashing pipeline

I started getting this error today after adding some new code to my Logstash config. I'm having trouble tracking it down and would appreciate any help on where to start.

The error:
NoMethodError: undefined method `strip' for nil:NilClass
filter at /opt/logstash/vendor/bundle/jruby/1.9/gems/logstash-filter-xml-2.0.2/lib/logstash/filters/xml.rb:94
multi_filter at /opt/logstash/vendor/bundle/jruby/1.9/gems/logstash-core-2.1.0-java/lib/logstash/filters/base.rb:151
each at org/jruby/RubyArray.java:1613
multi_filter at /opt/logstash/vendor/bundle/jruby/1.9/gems/logstash-core-2.1.0-java/lib/logstash/filters/base.rb:148
cond_func_71 at (eval):2134
each at org/jruby/RubyArray.java:1613
cond_func_71 at (eval):2129
filter_func at (eval):820
filterworker at /opt/logstash/vendor/bundle/jruby/1.9/gems/logstash-core-2.1.0-java/lib/logstash/pipeline.rb:243
start_filters at /opt/logstash/vendor/bundle/jruby/1.9/gems/logstash-core-2.1.0-java/lib/logstash/pipeline.rb:177

if "activity" in [tags] {

    # adjust timezone
    date {
        match => ["requesttime_epoch", "ISO8601", "UNIX_MS"]
        timezone => "America/New_York"
        #locale => "en"
        target => "requesttime"
        remove_field => ["requesttime_epoch"]
    }

    date {
        match => ["responsetime_epoch", "ISO8601", "UNIX_MS"]
        timezone => "America/New_York"
        #locale => "en"
        target => "responsetime"
        remove_field => ["responsetime_epoch"]
    }


    # xml parsing - XPath to be implemented
    xml {
        # Parse XML field
        source => "request"
        target => "parsed"
        add_tag => ["xml_parsed"]
        xpath => [
        "namespace-uri(/*)", "MessageNamespace",
        "concat(//Context/Id, substring('', 1 div not(//Context/Id/text())))", "MessageContextID",
        "concat(/*[local-name()='Request']/Parameter/Session/UserHeaderAgent, substring('', 1 div not(/*[local-name()='Request']/Parameter/Session/UserHeaderAgent/text())))", "UserHeaderAgent",
        "concat(/*[local-name()='Request']/Parameter/Session/Id, substring('', 1 div not(/*[local-name()='Request']/Parameter/Session/Id/text())))", "SessionID",
        "concat(/*[local-name()='Request']/Parameter/Session/IpAddress, substring('', 1 div not(/*[local-name()='Request']/Parameter/Session/IpAddress/text())))", "IpAddress",
        "concat(//Context/Source, substring('', 1 div not(//Context/Source/text())))", "RequestSource"
        ]
        store_xml => false
    }

    grok {
        match => { "MessageNamespace" => "^(?:[^\/]*\/){3}(?<OperationFqn>(?<OperationCategory>\S+?)\/(?<OperationName>\w+)\/(?<OperationVersion>[\d\/]+?))\/{0,1}$" }
    }

    # Geo IP
    if [IpAddress] {
        geoip {
            source => "IpAddress"
            database => "/opt/logstash/vendor/geoip/GeoLiteCity.dat"
            target => "geoip"
            # add_field => [ "[geoip][coordinates]", "%{[geoip][longitude]}" ]
            # add_field => [ "[geoip][coordinates]", "%{[geoip][latitude]}"  ]
        }
    }

    ruby {
        # derive a daily index name from the parsed response time
        code => "event['index_day'] = event['responsetime'].time.localtime.strftime('%Y.%m.%d')"
    }

}

The request field wasn't set for this event, so the xml filter ended up calling strip on nil. Starting with version 2.1.1 of the xml filter you'll get a decent error message instead of the stack trace above (see PR #21).
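
In the meantime you can keep the pipeline alive by only running the xml filter when the field is actually present. Here's a minimal sketch against the config above (the request_missing tag is just an example name, and the xpath list is elided for brevity):

if "activity" in [tags] {

    # only parse when the request field exists on the event
    if [request] {
        xml {
            source => "request"
            target => "parsed"
            add_tag => ["xml_parsed"]
            store_xml => false
            # xpath => [ ... ] same expressions as in the original config
        }
    } else {
        # tag the event so missing requests are easy to find later
        mutate { add_tag => ["request_missing"] }
    }
}

The else branch is optional; it just marks events that arrive without a request instead of skipping the parse silently.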