Install Logstash (omitted)
Install the logstash-output-oss plugin
/data/logstash/bin/logstash-plugin install logstash-output-oss
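To confirm the plugin was installed into this Logstash instance, you can list the installed plugins and filter for the OSS output (a minimal check; the /data/logstash install path is taken from the command above):

/data/logstash/bin/logstash-plugin list | grep logstash-output-oss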
logstash.conf configuration
input {
  kafka {
    bootstrap_servers => "kafka1:9092,kafka2:9092,kafka3:9092"
    group_id => "oss_logs_group"
    topics => ["xinlimei_pro_topic", "xinlimei_logs_pro", "sunmei_logs_pro_topic"]
    client_id => "oss_logs_1"
    consumer_threads => 6
    sasl_mechanism => "PLAIN"
    security_protocol => "SASL_PLAINTEXT"
    sasl_jaas_config => "org.apache.kafka.common.security.plain.PlainLoginModule required username='admin' password='kafkasunmei0227';"
    codec => "json"
    auto_offset_reset => "latest"
  }
}

filter {
  mutate {
    lowercase => ["app_name"]
    # Drop fields that are not needed
    remove_field => ["_index", "_id", "_type", "_version", "_score", "referer", "agent", "@version", "host"]
  }
  date {
    match => ["date", "yyyy-MM-dd HH:mm:ss.SSS"]
    target => "@timestamp"
    timezone => "Asia/Shanghai"
  }
  ruby {
    code => "event.set('index_day', (event.get('@timestamp').time.localtime).strftime('%Y.%m.%d'))"
  }
}

output {
  oss {
    # Replace with your OSS endpoint
    endpoint => "http://oss-cn-shanghai-internal.aliyuncs.com"
    # Replace with your OSS bucket name
    bucket => "tr-app-logs"
    # Replace with your AccessKey ID
    access_key_id => "your access_key"
    # Replace with your AccessKey Secret
    access_key_secret => "your access_key_secret"
    # Replace with the storage path prefix (directory name) for the logs in OSS
    prefix => "%{index_day}-%{[app_name]}-%{[profiles_active]}"
    # On startup, Logstash tries to resume uploads that were left unfinished,
    # which helps keep log data from being lost when Logstash restarts
    recover => true
    # size_and_time: Logstash rotates files based on both file size and elapsed time
    rotation_strategy => "size_and_time"
    # Rotate and upload every 10 minutes (unit: minutes)
    time_rotate => 10
    # Rotate and upload at 100 MB (unit: bytes)
    size_rotate => 104857600
    # Replace with the directory for temporary files
    temporary_directory => "/data/logstash/temporary/"
    # Logstash compresses files before uploading, which reduces the size of the objects written to OSS
    encoding => "gzip"
    additional_oss_settings => {
      max_connections_to_oss => 1024
      secure_connection_enabled => false
    }
    codec => json {
      charset => "UTF-8"
    }
  }
}
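With the configuration in place, you can syntax-check it and then start Logstash. A minimal sketch, assuming the /data/logstash install path from above and that the file is saved as /data/logstash/config/logstash.conf (adjust the paths to your layout):

# Validate the configuration syntax and exit
/data/logstash/bin/logstash -f /data/logstash/config/logstash.conf --config.test_and_exit

# Run Logstash in the foreground with this configuration
/data/logstash/bin/logstash -f /data/logstash/config/logstash.conf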