Program Log
No. | Field | Type | Description | Elasticsearch storage |
---|---|---|---|---|
1 | LogAt | DateTime | Log timestamp | ISO 8601 |
2 | TraceId | string | Trace ID | not analyzed |
3 | Department | string | Department | not analyzed, lowercased |
4 | Team | string | Team | not analyzed, lowercased |
5 | Project | string | Project name | not analyzed, lowercased |
6 | Host | string | Host name | not analyzed, lowercased |
7 | ServerIP | string | Server IP | not analyzed, lowercased |
8 | ContextPath | string | Context path (virtual directory) | not analyzed, lowercased |
9 | UriStem | string | URL stem | not analyzed, lowercased |
10 | QueryString | string | GET parameters | analyzed |
11 | FormString | string | POST parameters | analyzed |
12 | UserAgent | string | User agent | analyzed |
13 | Level | string | Log level | not analyzed, uppercased |
14 | Class | string | Class in which the log was written | not analyzed |
15 | Method | string | Method in which the log was written | not analyzed |
16 | MethodParams | string | Parameters of the method that threw the custom exception | analyzed |
17 | Line | int | Line number | integer |
18 | Logger | string | Logger name | not analyzed |
19 | IOType | string | I/O type of the custom exception | not analyzed |
20 | ExceptionType | string | Exception type | not analyzed |
21 | ExceptionMessage | string | Exception message | analyzed |
22 | CustomMessage | string | Custom message | analyzed |
23 | StackTrace | string | Stack trace | analyzed |
24 | HawkKey | string | Key | not analyzed |
Example log record:
"2018-04-20T15:18:59.773+08:00" "-" "dealer" "dealer.arch" "projectname" "-" "10.1.1.1" "-" "-" "-" "-" "-" "ERROR" "org.springframework.test.context.TestContextManager" "prepareTestInstance" "-" "234" "org.springframework.test.context.TestContextManager" "unknown" "java.lang.IllegalStateException" "Failed to load ApplicationContext" "Caught exception while allowing TestExecutionListener [org.springframework.test.context.web.ServletTestExecutionListener@17a756db] to prepare test instance [com.autohome.daimon.job.service.integration.HawkeyeServiceTest@2d10160a]" "java.lang.IllegalStateException: Failed to load ApplicationContext
at org.springframework.test.context.cache.DefaultCacheAwareContextLoaderDelegate.loadContext(DefaultCacheAwareContextLoaderDelegate.java:124)
at com.intellij.rt.execution.junit.JUnitStarter.main(JUnitStarter.java:70)
Caused by: org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'hawkeyeService': Injection of resource dependencies failed; nested exception is org.springframework.beans.factory.BeanNotOfRequiredTypeException: Bean named 'redisDao' is expected to be of type 'RedisDao' but was actually of type 'RedisDao$$EnhancerBySpringCGLIB$$17f5ad58'
at org.springframework.context.annotation.CommonAnnotationBeanPostProcessor.postProcessPropertyValues(CommonAnnotationBeanPostProcessor.java:321)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.populateBean(AbstractAutowireCapableBeanFactory.java:1268)
at org.springframework.test.context.cache.DefaultCacheAwareContextLoaderDelegate.loadContextInternal(DefaultCacheAwareContextLoaderDelegate.java:98)
at org.springframework.test.context.cache.DefaultCacheAwareContextLoaderDelegate.loadContext(DefaultCacheAwareContextLoaderDelegate.java:116)
... 24 more
Caused by: org.springframework.beans.factory.BeanNotOfRequiredTypeException: Bean named 'redisDao' is expected to be of type 'IRedisDao' but was actually of type 'RedisDao$$EnhancerBySpringCGLIB$$17f5ad58'
at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:384)
at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:202)
... 40 more
" "b83539ac4a40de0d"
<dependency>
<groupId>com.autohome.autolog4j</groupId>
<artifactId>log4j2x</artifactId>
<version>${latestVersion}</version>
</dependency>
Used to capture the web-related fields (Host, ContextPath, UriStem, QueryString, FormString, UserAgent):
@Bean
public FilterRegistrationBean log4j2MdcRegistration() {
Log4j2MdcFilter filter = new Log4j2MdcFilter();
// If host is not set, it is taken from the request by default
filter.setHost("www.autohome.com.cn");
return new FilterRegistrationBean(filter);
}
<?xml version="1.0" encoding="UTF-8"?>
<configuration status="WARN">
<properties>
<property name="log.path">D:\\Tomcat\\autolog4j-examples\\log4j2\\logs</property>
<property name="department">[your_department]</property>
<property name="team">[your_team]</property>
<property name="project">autolog4j.log4j2.examples</property>
<property name="log.root.level">INFO</property>
</properties>
<appenders>
<Console name="Console" target="SYSTEM_OUT">
<PatternLayout charset="UTF-8" pattern="%d{yyyy-MM-dd HH:mm:ss}`${team}`${project}`%highlight{%p}`%l`%m`%n"/>
<EnumFilter allowLevels="TRACE,DEBUG,INFO,WARN,ERROR,FATAL" />
</Console>
<!--program log start-->
<RollingFile name="ProgramError" ignoreExceptions="false"
fileName="${sys:log.path}/project_error.log"
filePattern="${sys:log.path}/project_error.log_%d{yyyy-MM-dd}">
<Autolog4jCsvLayout charset="UTF-8" department="${sys:department}" team="${sys:team}" project="${sys:project}" />
<Policies>
<TimeBasedTriggeringPolicy interval="1" modulate="true" />
</Policies>
<EnumFilter allowLevels="WARN,ERROR,FATAL" />
</RollingFile>
<RollingFile name="ProgramRun" ignoreExceptions="false"
fileName="${sys:log.path}/project_run.log"
filePattern="${sys:log.path}/project_run.log_%d{yyyy-MM-dd}">
<Autolog4jCsvLayout charset="UTF-8" department="${sys:department}" team="${sys:team}" project="${sys:project}" />
<Policies>
<TimeBasedTriggeringPolicy interval="1" modulate="true" />
</Policies>
<EnumFilter allowLevels="TRACE,DEBUG,INFO" />
</RollingFile>
<!--program log end-->
</appenders>
<loggers>
<Root level="${sys:log.root.level}">
<appender-ref ref="Console" />
<appender-ref ref="ProgramError" />
<appender-ref ref="ProgramRun" />
</Root>
</loggers>
</configuration>
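With the dependency, filter, and Log4j2 configuration above in place, application code logs through the standard Log4j2 API; the EnumFilter settings then route each record to the run or error file. A minimal sketch, assuming an illustrative class and messages (OrderService and placeOrder are not part of autolog4j):

```java
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

public class OrderService {
    // standard Log4j2 logger; autolog4j supplies the layout and filter, not a new logging API
    private static final Logger LOGGER = LogManager.getLogger(OrderService.class);

    public void placeOrder(String orderId) {
        // INFO is accepted by the ProgramRun appender (TRACE,DEBUG,INFO)
        LOGGER.info("placing order {}", orderId);
        try {
            // ... business logic ...
        } catch (RuntimeException ex) {
            // ERROR is accepted by the ProgramError appender (WARN,ERROR,FATAL);
            // the attached exception fills ExceptionType, ExceptionMessage and StackTrace in the CSV record
            LOGGER.error("failed to place order " + orderId, ex);
        }
    }
}
```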
<dependency>
<groupId>com.autohome.autolog4j</groupId>
<artifactId>logback</artifactId>
<version>${latestVersion}</version>
</dependency>
private final static String HOST = "www.autohome.com.cn";
@Bean
public FilterRegistrationBean logbackMdcRegistration() {
LogbackMdcFilter filter = new LogbackMdcFilter();
filter.setHost(HOST);
return new FilterRegistrationBean(filter);
}
<?xml version="1.0" encoding="UTF-8"?>
<configuration>
<property name="app_id" value="1"/>
<property name="log.path" value="D:\\Tomcat\\autolog4j-examples\\logback\\logs"/>
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>%d{dd-MM-yyyy HH:mm:ss.SSS} %magenta([%thread]) %highlight(%-5level) %logger{36}.%M - %msg%n</pattern>
</encoder>
</appender>
<appender name="ProgramRun" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/project_run.log</file>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${log.path}/project_run.log.%d{yyyy-MM-dd}</fileNamePattern>
</rollingPolicy>
<filter class="com.autohome.autolog4j.logback.EnumFilter">
<allowLevels>TRACE,DEBUG,INFO</allowLevels>
</filter>
<encoder class="ch.qos.logback.core.encoder.LayoutWrappingEncoder">
<layout class="com.autohome.autolog4j.logback.Autolog4jCsvLayout">
<department>[your_department]</department>
<team>[your_team]</team>
<project>autolog4j.examples.logback</project>
<hawkKey>${app_id}</hawkKey>
</layout>
</encoder>
</appender>
<appender name="ProgramError" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/project_error.log</file>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${log.path}/project_error.log.%d{yyyy-MM-dd}</fileNamePattern>
</rollingPolicy>
<filter class="com.autohome.autolog4j.logback.EnumFilter">
<allowLevels>WARN,ERROR</allowLevels>
</filter>
<encoder class="ch.qos.logback.core.encoder.LayoutWrappingEncoder">
<layout class="com.autohome.autolog4j.logback.Autolog4jCsvLayout">
<department>[your_department]</department>
<team>[your_team]</team>
<project>autolog4j.examples.logback</project>
<hawkKey>${app_id}</hawkKey>
</layout>
</encoder>
</appender>
<root level="info">
<appender-ref ref="STDOUT"/>
<appender-ref ref="ProgramRun"/>
<appender-ref ref="ProgramError"/>
</root>
</configuration>
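Logback is normally used through the SLF4J API, so application code looks like the following sketch (InventoryJob and its messages are illustrative assumptions, not part of autolog4j):

```java
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class InventoryJob {
    // SLF4J logger backed by the logback configuration above
    private static final Logger LOGGER = LoggerFactory.getLogger(InventoryJob.class);

    public void run() {
        LOGGER.info("inventory sync started");            // routed to ProgramRun (TRACE,DEBUG,INFO)
        try {
            // ... job logic ...
        } catch (RuntimeException ex) {
            LOGGER.error("inventory sync failed", ex);    // routed to ProgramError (WARN,ERROR)
        }
    }
}
```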
<dependency>
<groupId>com.autohome.autolog4j</groupId>
<artifactId>log4j1x</artifactId>
<version>${latestVersion}</version>
</dependency>
private final static String HOST = "autolog4j.autohome.com.cn";
@Bean
public FilterRegistrationBean log4jMdcRegistration() {
Log4jMdcFilter filter = new Log4jMdcFilter();
filter.setHost(HOST);
return new FilterRegistrationBean(filter);
}
<?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE log4j:configuration SYSTEM "log4j.dtd">
<log4j:configuration xmlns:log4j='http://jakarta.apache.org/log4j/'>
<appender name="ProgramError" class="org.apache.log4j.DailyRollingFileAppender">
<param name="encoding" value="UTF-8"/>
<param name="file" value="${tomcat.logs.dir}/project_error.log"/>
<param name="DatePattern" value="'_'yyyy-MM-dd"/>
<layout class="com.autohome.autolog4j.log4j1x.Autolog4jCsvLayout">
<param name="department" value="${department}"/>
<param name="team" value="${team}"/>
<param name="project" value="${project}"/>
<param name="hawkKey" value="${key}"/>
</layout>
<filter class="org.apache.log4j.varia.LevelRangeFilter">
<param name="LevelMin" value="WARN"/>
</filter>
</appender>
<appender name="ProgramRun" class="org.apache.log4j.DailyRollingFileAppender">
<param name="encoding" value="UTF-8"/>
<param name="file" value="${tomcat.logs.dir}/project_run.log"/>
<param name="DatePattern" value="'_'yyyy-MM-dd"/>
<layout class="com.autohome.autolog4j.log4j1x.Autolog4jCsvLayout">
<param name="department" value="${department}"/>
<param name="team" value="${team}"/>
<param name="project" value="${project}"/>
<param name="hawkKey" value="${key}"/>
</layout>
<!-- restrict the output level -->
<filter class="org.apache.log4j.varia.LevelRangeFilter">
<param name="LevelMax" value="INFO"/>
</filter>
</appender>
<root>
<level value="INFO"/>
<appender-ref ref="ProgramError"/>
<appender-ref ref="ProgramRun"/>
</root>
</log4j:configuration>
Versions verified by testing: Logstash 2.4.1, Kafka 0.10; other versions need to be verified on your own.
input {
tcp {
type => "applog"
codec=> json
port => [your_port]
}
}
filter {
mutate {
convert => {
"Line" => "integer"
}
uppercase => [ "Level" ]
lowercase => [ "Department", "Team", "Project", "Host", "ContextPath", "UriStem" ]
}
}
output {
if [Level] == "WARN" or [Level] == "ERROR" or [Level] == "FATAL" or [Level] == "WARNING" or [Level] == "EROR" {
kafka {
codec => json
topic_id => "project_error_log"
client_id => "client_id"
bootstrap_servers => "127.0.0.1:9092,127.0.0.2:9092"
retries => 2
}
} else {
kafka {
codec => json
topic_id => "project_info_log"
client_id => "client_id"
bootstrap_servers => "127.0.0.1:9092,127.0.0.2:9092"
retries => 2
}
}
}
The configuration below is the main body of the nxlog configuration file; it does not include the header section.
<Extension syslog>
Module xm_syslog
</Extension>
<Extension json>
Module xm_json
</Extension>
#========================================================================
#applog
##======================================================================
<Extension applog_layout>
Module xm_csv
Fields $LogAt, $TraceId, $Department, $Team, $Project, $Host, $ServerIP, $ContextPath, $UriStem, $QueryString, $FormString, $UserAgent, $Level, $Class, $Method, $MethodParams, $Line, $Logger, $IOType, $ExceptionType, $ExceptionMessage, $CustomMessage, $StackTrace, $HawkKey
FieldTypes string, string, string, string, string, string, string, string, string, string, string, string, string, string, string, string, string, string, string, string, string, string, string, string
Delimiter \t
QuoteChar '"'
EscapeControl TRUE
QuoteMethod All
UndefValue ''
</Extension>
<Extension applog-multi>
Module xm_multiline
HeaderLine /^"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.+/
</Extension>
<Input applog_in>
Module im_file
File '/data/project_*.log'
SavePos TRUE
InputType applog-multi
Exec applog_layout->parse_csv();
Exec to_json();
</Input>
<Input auth_applog_in>
Module im_file
File '/data/auth/project_*.log'
SavePos TRUE
InputType applog-multi
Exec applog_layout->parse_csv();
Exec to_json();
</Input>
<Output applog_out>
Module om_tcp
Host [your_host]
Port [your_port]
</Output>
<Route applog>
Path applog_in,auth_applog_in => applog_out
</Route>