storm-0.10.0: all worker logs end up in supervisor.log

13z8s7eq · posted 2021-06-21 · in Storm

This is my cluster.xml:

<configuration monitorInterval="60">
<properties>
    <property name="pattern">%d{yyyy-MM-dd HH:mm:ss.SSS} %c{5.} [%p] %msg%n</property>
    <property name="patternMetrics">%d %-8r %m%n</property>
</properties>
<appenders>
    <RollingFile name="A1" immediateFlush="false"
                 fileName="${sys:storm.log.dir}/${sys:logfile.name}"
                 filePattern="${sys:storm.log.dir}/${sys:logfile.name}.%i.gz">
        <PatternLayout>
            <pattern>${pattern}</pattern>
        </PatternLayout>
        <Policies>
            <SizeBasedTriggeringPolicy size="100 MB"/> <!-- Or every 100 MB -->
        </Policies>
        <DefaultRolloverStrategy max="9"/>
    </RollingFile>

    <RollingFile name="THRIFT-ACCESS" immediateFlush="false"
                 fileName="${sys:storm.log.dir}/access-${sys:logfile.name}"
                 filePattern="${sys:storm.log.dir}/access-${sys:logfile.name}.%i.gz">
        <PatternLayout>
            <pattern>${pattern}</pattern>
        </PatternLayout>
        <Policies>
            <SizeBasedTriggeringPolicy size="100 MB"/> <!-- Or every 100 MB -->
        </Policies>
        <DefaultRolloverStrategy max="9"/>
    </RollingFile>
    <RollingFile name="METRICS" immediateFlush="false"
                 fileName="${sys:storm.log.dir}/metrics.log"
                 filePattern="${sys:storm.log.dir}/metrics.log.%i.gz">
        <PatternLayout>
            <pattern>${patternMetrics}</pattern>
        </PatternLayout>
        <Policies>
            <SizeBasedTriggeringPolicy size="2 MB"/> <!-- Or every 2 MB -->
        </Policies>
        <DefaultRolloverStrategy max="9"/>
    </RollingFile>
    <Syslog name="syslog" format="RFC5424" charset="UTF-8" host="localhost" port="514"
            protocol="UDP" appName="[${sys:daemon.name}]" mdcId="mdc" includeMDC="true"
            facility="LOCAL5" enterpriseNumber="18060" newLine="true" exceptionPattern="%rEx{full}"
            messageId="[${sys:user.name}:S0]" id="storm" immediateFlush="false" immediateFail="true"/>
</appenders>
<loggers>
    <Logger name="storm.kafka.KafkaSpout" level="info" additivity="false">
        <AppenderRef ref="THRIFT-ACCESS"/>
    </Logger>
    <Logger name="backtype.storm.metric.LoggingMetricsConsumer" level="info">
        <AppenderRef ref="METRICS"/>
    </Logger>
    <Logger name="storm.trident.tuple.TridentTuple" level="info" additivity="false">
        <AppenderRef ref="A1"/>
    </Logger>
    <root level="info"> <!-- We log everything -->
        <appender-ref ref="A1"/>
    </root>
</loggers>
</configuration>

All of my worker logs end up in supervisor.log, and in a strange format. Here is a sample line:

2016-01-12 10:23:21.655 backt.storm.util [WARN] Worker Process fe85591f-a5b4-4620-9705-7d9c37450b0f:259424 [Thread-2] INFO  STDERR - 258752 [Thread-21-b-6-netflow_stream] ERROR STDIO - at storm.trident.planner.processor.AppendCollector.emit(AppendCollector.java:50)

I think I only want the message that comes after "INFO STDERR", and the log level should be ERROR rather than WARN.
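
Judging by the sample, the inner "ERROR STDIO - at ..." fragment is a stack trace that was written to stderr inside the worker (for example via printStackTrace()); Storm's stdout/stderr redirection re-logs it line by line, and the supervisor then appears to wrap the worker-process output once more at WARN via backtype.storm.util, which produces the nesting. On the topology side, a minimal sketch of how to avoid feeding stack traces to stderr in the first place (the bolt class below is hypothetical, not taken from the question) is to log exceptions through SLF4J at ERROR:

package com.example.bolts; // hypothetical package, for illustration only

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import backtype.storm.topology.BasicOutputCollector;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseBasicBolt;
import backtype.storm.tuple.Tuple;

// Hypothetical bolt; only the logging pattern matters here.
public class NetflowParseBolt extends BaseBasicBolt {

    // SLF4J logger, backed by log4j2 in Storm 0.10; the logger name is the
    // fully qualified class name.
    private static final Logger LOG = LoggerFactory.getLogger(NetflowParseBolt.class);

    @Override
    public void execute(Tuple tuple, BasicOutputCollector collector) {
        try {
            // ... parse and emit ...
        } catch (Exception e) {
            // Keeps the ERROR level and the stack trace in one log event,
            // instead of having printStackTrace() output re-logged line by
            // line as INFO STDERR / ERROR STDIO.
            LOG.error("Failed to process tuple {}", tuple, e);
        }
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        // no output fields needed for this sketch
    }
}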

dauxcl2d 1#

If you want to log specific classes to separate files, you should move this configuration from cluster.xml into worker.xml, like so:

<configuration monitorInterval="60">
<properties>
    <property name="pattern">%d{yyyy-MM-dd HH:mm:ss.SSS} %c{1.} [%p] %msg%n</property>
    <property name="patternNoTime">%msg%n</property>
</properties>
<appenders>
    <RollingFile name="A1"
                 fileName="${sys:storm.log.dir}/${sys:logfile.name}"
                 filePattern="${sys:storm.log.dir}/${sys:logfile.name}.%i.gz">
        <PatternLayout>
            <pattern>${pattern}</pattern>
        </PatternLayout>
        <Policies>
            <SizeBasedTriggeringPolicy size="100 MB"/> <!-- Or every 100 MB -->
        </Policies>
        <DefaultRolloverStrategy max="9"/>
    </RollingFile>
    <RollingFile name="STDOUT"
                 fileName="${sys:storm.log.dir}/${sys:logfile.name}.out"
                 filePattern="${sys:storm.log.dir}/${sys:logfile.name}.out.%i.gz">
        <PatternLayout>
            <pattern>${patternNoTime}</pattern>
        </PatternLayout>
        <Policies>
            <SizeBasedTriggeringPolicy size="100 MB"/> <!-- Or every 100 MB -->
        </Policies>
        <DefaultRolloverStrategy max="4"/>
    </RollingFile>
    <RollingFile name="STDERR"
                 fileName="${sys:storm.log.dir}/${sys:logfile.name}.err"
                 filePattern="${sys:storm.log.dir}/${sys:logfile.name}.err.%i.gz">
        <PatternLayout>
            <pattern>${patternNoTime}</pattern>
        </PatternLayout>
        <Policies>
            <SizeBasedTriggeringPolicy size="100 MB"/> <!-- Or every 100 MB -->
        </Policies>
        <DefaultRolloverStrategy max="4"/>
    </RollingFile>
    <RollingFile name="CHANNE"
                 fileName="${sys:storm.log.dir}/channe-code.log"
                 filePattern="${sys:storm.log.dir}/channe-code.log.%i">
        <PatternLayout>
            <pattern>${pattern}</pattern>
        </PatternLayout>
        <Policies>
            <SizeBasedTriggeringPolicy size="100 MB"/> <!-- Or every 100 MB -->
        </Policies>
        <DefaultRolloverStrategy max="9"/>
    </RollingFile>
    <Syslog name="syslog" format="RFC5424" host="localhost" port="514"
        protocol="UDP" appName="[${sys:storm.id}:${sys:worker.port}]" mdcId="mdc" includeMDC="true"
        facility="LOCAL5" enterpriseNumber="18060" newLine="true" exceptionPattern="%rEx{full}"
        messageId="[${sys:user.name}:${sys:logging.sensitivity}]" id="storm"/>
</appenders>
<loggers>
    <root level="info"> <!-- We log everything -->
        <appender-ref ref="A1"/>
        <appender-ref ref="syslog"/>
    </root>
    <Logger name="STDERR" level="INFO">
        <appender-ref ref="STDERR"/>
        <appender-ref ref="syslog"/>
    </Logger>
    <Logger name="STDOUT" level="INFO">
        <appender-ref ref="STDOUT"/>
        <appender-ref ref="syslog"/>
    </Logger>
    <Logger name="com.cc.imp.KafkaDataSpout" level="info">
        <AppenderRef ref="CHANNE"/>
    </Logger>
    <Logger name="com.cc.imp.MessageSplitBolt" level="info">
        <AppenderRef ref="CHANNE"/>
    </Logger>
    <Logger name="com.cc.imp.CalculateBolt" level="info">
        <AppenderRef ref="CHANNE"/>
    </Logger>
    <Logger name="com.cc.imp.WriteOpenTSDBBolt" level="info">
        <AppenderRef ref="CHANNE"/>
    </Logger>
    <Logger name="com.cc.imp.WriteHbaseBolt" level="info">
        <AppenderRef ref="CHANNE"/>
    </Logger>
    <Logger name="com.cc.imp.SendTransferBolt" level="info">
        <AppenderRef ref="CHANNE"/>
    </Logger>
</loggers>
</configuration>
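
For the per-class appender above to capture anything, each of those classes must obtain its logger under its fully qualified class name, because that is what the <Logger name="com.cc.imp.KafkaDataSpout"> entries match on. A minimal sketch, assuming com.cc.imp.KafkaDataSpout is your own spout class as the configuration suggests (the method shown is made up):

package com.cc.imp;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Skeleton only; the real class presumably extends a Storm spout base class.
public class KafkaDataSpout {

    // The logger name "com.cc.imp.KafkaDataSpout" matches the
    // <Logger name="com.cc.imp.KafkaDataSpout"> entry in worker.xml,
    // so these messages are routed to the CHANNE appender (channe-code.log).
    private static final Logger LOG = LoggerFactory.getLogger(KafkaDataSpout.class);

    public void emitRecord(String record) {
        LOG.info("emitting record: {}", record);
    }
}

Note also that the com.cc.imp.* loggers above do not set additivity="false", so the same messages will additionally propagate to the root logger and show up in the worker's main log file; if you want them only in channe-code.log, add additivity="false" to those Logger entries, as the cluster.xml in the question already does for its named loggers.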
