更改filebeat

This commit is contained in:
dxin
2025-12-11 11:11:16 +08:00
parent 8288aad918
commit 7ad9956c5d
11 changed files with 269 additions and 16 deletions

143
ES/单节点/安装es.conf Normal file
View File

@@ -0,0 +1,143 @@
# ------------------------------------------------------------------
# Runbook: single-node Elasticsearch 9.2.2 + Kibana install (dnf/RPM).
# NOTE(review): this file mixes shell commands and YAML snippets; it is
# meant to be followed manually, not executed as a single script.
# ------------------------------------------------------------------
# Prerequisites & preparation
sudo dnf update -y
sudo dnf install -y nano wget curl unzip
# Open ports 9200 and 5601 in the security group / firewall
# Install Elasticsearch 9.2.2
# Import the official GPG key
sudo rpm --import https://artifacts.elastic.co/GPG-KEY-elasticsearch
# Create the yum repo file
sudo tee /etc/yum.repos.d/elasticsearch.repo <<-'EOF'
[elasticsearch]
name=Elasticsearch repository for 9.x packages
baseurl=https://artifacts.elastic.co/packages/9.x/yum
gpgcheck=1
gpgkey=https://artifacts.elastic.co/GPG-KEY-elasticsearch
enabled=1
autorefresh=1
type=rpm-md
EOF
# Install Elasticsearch
sudo dnf install elasticsearch --enablerepo=elasticsearch
# Start it directly first; if it fails, check the logs — often a permission issue
sudo systemctl daemon-reload
sudo systemctl enable elasticsearch
sudo systemctl start elasticsearch
sudo systemctl status elasticsearch
sudo journalctl -u elasticsearch -f
# Manually create the log directory and set ownership/permissions
sudo mkdir -p /usr/share/elasticsearch/logs
sudo chown -R elasticsearch:elasticsearch /usr/share/elasticsearch/logs
sudo chmod 750 /usr/share/elasticsearch/logs
# Set the elastic superuser password (recommended to do immediately)
sudo /usr/share/elasticsearch/bin/elasticsearch-reset-password -u elastic
# Check the auto-generated self-signed certificates; if present, all is well
ll /etc/elasticsearch/certs/
# Show the HTTP CA certificate fingerprint (used when configuring clients)
sudo openssl x509 -fingerprint -sha256 -in /etc/elasticsearch/certs/http_ca.crt -noout
# Export the password as an environment variable (replace with your real password)
# NOTE(review): a plaintext credential is committed in this file — rotate it
# and keep secrets out of version control.
export ELASTIC_PASSWORD='MyElastic123!'
# Test an HTTPS request (--cacert is required because TLS is enabled)
curl --cacert /etc/elasticsearch/certs/http_ca.crt \
-u elastic:$ELASTIC_PASSWORD \
https://localhost:9200
# Show the effective (non-comment, non-blank) configuration
grep -v '^\s*#\|^\s*$' /etc/elasticsearch/elasticsearch.yml
# Adjust the configuration as needed (cluster name, non-local access, etc.).
# The YAML below is the target /etc/elasticsearch/elasticsearch.yml content.
cluster.name: my-test-es
path.data: /var/lib/elasticsearch
path.logs: /var/log/elasticsearch
network.host: 0.0.0.0
xpack.security.enabled: true
xpack.security.enrollment.enabled: true
xpack.security.http.ssl:
enabled: true
keystore.path: certs/http.p12
xpack.security.transport.ssl:
enabled: true
verification_mode: certificate
keystore.path: certs/transport.p12
truststore.path: certs/transport.p12
cluster.initial_master_nodes: ["weblessie-server-02"]
http.host: 0.0.0.0
# Adjust the Elasticsearch JVM heap size
vim /etc/elasticsearch/jvm.options
-Xms4g
-Xmx4g
# Restart
sudo systemctl restart elasticsearch
# Generate an enrollment token (used later when setting up Kibana)
sudo /usr/share/elasticsearch/bin/elasticsearch-create-enrollment-token -s kibana
# Prepare to install Kibana 9.2.2
# Create the repo file /etc/yum.repos.d/kibana.repo
sudo tee /etc/yum.repos.d/kibana.repo <<-'EOF'
[kibana]
name=Kibana repository for 9.x packages
baseurl=https://artifacts.elastic.co/packages/9.x/yum
gpgcheck=1
gpgkey=https://artifacts.elastic.co/GPG-KEY-elasticsearch
enabled=1
autorefresh=1
type=rpm-md
EOF
# Install Kibana
sudo dnf install kibana --enablerepo=kibana
# Start
sudo systemctl daemon-reload
sudo systemctl enable --now kibana
# Open Kibana in a browser and paste the enrollment token generated above
http://ip:5601
# Get the "verification code"
/usr/share/kibana/bin/kibana-verification-code
# Generate encryption keys with the official tool (the standard approach)
sudo /usr/share/kibana/bin/kibana-encryption-keys generate --force
# Expected output looks like:
# ✔ Encryption keys generated and written to /etc/kibana/kibana.yml:
# xpack.encryptedSavedObjects.encryptionKey
# xpack.reporting.encryptionKey
# xpack.security.encryptionKey
# Edit the configuration file; the YAML below is the target /etc/kibana/kibana.yml content
grep -v '^\s*#\|^\s*$' /etc/kibana/kibana.yml
server.host: "0.0.0.0"
logging:
appenders:
file:
type: file
fileName: /var/log/kibana/kibana.log
layout:
type: json
root:
appenders:
- default
- file
pid.file: /run/kibana/kibana.pid
i18n.locale: "zh-CN"
elasticsearch.hosts: [https://10.0.0.38:9200]
elasticsearch.serviceAccountToken: AAEAAWVsYXN0aWMva2liYW5hL2Vucm9sbC1wcm9jZXNzLXRva2VuLTE3NjUzNDE4OTI3MjY6Um9KdUo2N1hSZVNPeGNzOXFDaUh2dw
elasticsearch.ssl.certificateAuthorities: [/var/lib/kibana/ca_1765341893683.crt]
xpack.fleet.outputs: [{id: fleet-default-output, name: default, is_default: true, is_default_monitoring: true, type: elasticsearch, hosts: [https://10.0.0.38:9200], ca_trusted_fingerprint: 80af64db043e12ebda11c10f70042af91306a705fdcb6285814a84b420c734a5}]
# NOTE(review): the service account token and encryption keys below are
# committed secrets — regenerate them for any real deployment.
xpack.encryptedSavedObjects.encryptionKey: f10166c761265d5ca61e7fa2c1acac73
xpack.reporting.encryptionKey: 1772a5152522675d5a38470e905b2817
xpack.security.encryptionKey: d4b30e82e47f530a998e29cb0b8e5295

View File

@@ -0,0 +1,41 @@
# ------------------------------------------------------------------
# Notes: authenticating Filebeat and Python clients to Elasticsearch
# using the HTTP CA certificate fingerprint.
# NOTE(review): plaintext credentials below are committed to VCS —
# rotate them and keep secrets out of version control.
# ------------------------------------------------------------------
# Get the ES HTTP CA certificate fingerprint
sudo openssl x509 -fingerprint -sha256 -in /etc/elasticsearch/certs/http_ca.crt -noout
sha256 Fingerprint=80:AF:64:DB:04:3E:12:EB:DA:11:C1:0F:70:04:2A:F9:13:06:A7:05:FD:CB:62:85:81:4A:84:B4:20:C7:34:A5
# User created in the Kibana web UI
admin
G7ZSKFM4AQwHQpwA
# Filebeat
output.elasticsearch:
hosts: ["https://49.51.33.153:9200"]
username: "elastic"
password: "-0NiIBOJGn2CATuPWzNc"
# Verify via the CA fingerprint (instead of a certificate file)
ssl.verification_mode: "certificate"
ssl.certificate_authorities: [] # left empty (no full-chain validation)
ssl.supported_protocols: [TLSv1.2, TLSv1.3]
# Key point: ca_trusted_fingerprint is the SHA-256 above with the colons
# stripped — 64 hex characters, no 0x prefix, no colons.
# Fixed: the previous value contained a stray '2' (...A705FD2CB...),
# making it 65 hex chars and therefore not a valid SHA-256 digest.
ssl.ca_trusted_fingerprint: "80AF64DB043E12EBDA11C10F70042AF91306A705FDCB6285814A84B420C734A5"
# python
from elasticsearch import Elasticsearch
es = Elasticsearch(
hosts=["https://49.51.33.153:9200"],
basic_auth=("elastic", "-0NiIBOJGn2CATuPWzNc"),
# fingerprint must have the colons stripped (same stray-'2' fix as above)
ssl_assert_fingerprint="80AF64DB043E12EBDA11C10F70042AF91306A705FDCB6285814A84B420C734A5",
verify_certs=True # must be True
)
print(es.info())

View File

@@ -18,11 +18,11 @@ processors:
      when:
        equals:
          log_type: admin.log
-     tokenizer: '%{timestamp} [%{thread}] %{log_level} %{log_message}'
+     tokenizer: '%{timestamp} %{level} %{pid} --- \\[%{thread}\\] %{class} : %{message}'
      field: "message"
-     target_prefix: "parsed_sys_info"
+     target_prefix: "mylog"
      ignore_missing: true
-     overwrite_keys: false
+     overwrite_keys: true

View File

@@ -2,14 +2,14 @@
  id: us_pord_01_flymoon-admin
  enabled: true
  paths:
-   - /root/logs/flymoon-admin/sys-info.log
+   - /root/logs/flymoon-admin/app.log
  fields:
    application: flymoon-admin # 自定义字段,标识应用名称
    log_type: admin.log # 自定义字段,标识日志类型
    environment: us-pord # 自定义字段,标识机器环境名称
    instance: us-prod-01 # 自定义字段,标识机器名称
  fields_under_root: true
- multiline.pattern: '^\d{2}:\d{2}:\d{2}\.\d{3}' # 针对info的日志格式
+ multiline.pattern: '^\d{4}-\d{2}-\d{2}\ \d{2}:\d{2}:\d{2}\.\d{3}'
  multiline.negate: true
  multiline.match: after
  ignore_older: 24h # 忽略旧日志文件(避免处理已归档的日志)

View File

@@ -27,7 +27,7 @@ processors:
      when:
        equals:
          log_type: email.log
-     tokenizer: '%{timestamp} [%{thread}] %{level} %{class} - [%{method_line}] - %{message}'
+     tokenizer: '%{timestamp} %{level} %{pid} --- \\[%{thread}\\] %{message}'
      field: "message"
      target_prefix: "mylog"
      ignore_missing: true
@@ -37,7 +37,7 @@ processors:
      when:
        equals:
          log_type: agent.log
-     tokenizer: '%{timestamp} %{level} - [%{method},%{line}] - %{message}'
+     tokenizer: '%{date} %{time} %{level} %{pid} --- [%{thread}] %{class->} : [%{app}] %{message}'
      field: "message"
      target_prefix: "mylog"
      ignore_missing: true
@@ -45,6 +45,7 @@ processors:
  #输出
  output.elasticsearch:
    hosts: ["http://106.53.194.199:9200"]

View File

@@ -9,7 +9,7 @@
    environment: us-pord # 自定义字段,标识机器环境名称
    instance: us-prod-02 # 自定义字段,标识机器名称
  fields_under_root: true
- multiline.pattern: '^\d{2}:\d{2}:\d{2}\.\d{3}' # 针对email的sys-info.log的日志格式多行
+ multiline.pattern: '^\d{4}-\d{2}-\d{2}\ \d{2}:\d{2}:\d{2}\.\d{3}'
  multiline.negate: true
  multiline.match: after
  ignore_older: 24h # 忽略旧日志文件(避免处理已归档的日志)

View File

@@ -20,4 +20,3 @@
  start_position: beginning # 从文件的开头读取

View File

@@ -25,18 +25,20 @@ processors:
  - dissect:
      when:
        equals:
-         log_type: payment.log
+         log_type: agent.log
-     tokenizer: '%{timestamp} [%{thread}] %{level} %{class} - [%{method},%{line}] - %{message}'
+     tokenizer: '%{date} %{time} %{level} %{pid} --- [%{thread}] %{class->} : [%{app}] %{message}'
      field: "message"
      target_prefix: "mylog"
      ignore_missing: true
      overwrite_keys: true
  - dissect:
      when:
        equals:
-         log_type: agent.log
+         log_type: payment.log
-     tokenizer: '%{timestamp} %{level} - [%{method},%{line}] - %{message}'
+     tokenizer: '%{date} %{time} %{level} %{pid} --- [%{thread}] %{class->} : [%{app}] %{message}'
      field: "message"
      target_prefix: "mylog"
      ignore_missing: true
@@ -44,6 +46,7 @@ processors:
  #输出
  output.elasticsearch:
    hosts: ["http://106.53.194.199:9200"]

View File

@@ -9,7 +9,7 @@
    environment: us-pord # 自定义字段,标识机器环境名称
    instance: us-prod-03 # 自定义字段,标识机器名称
  fields_under_root: true
- multiline.pattern: '^\d{2}:\d{2}:\d{2}\.\d{3}' # 针对email的sys-info.log的日志格式多行
+ multiline.pattern: '^\d{4}-\d{2}-\d{2}\ \d{2}:\d{2}:\d{2}\.\d{3}'
  multiline.negate: true
  multiline.match: after
  ignore_older: 24h # 忽略旧日志文件(避免处理已归档的日志)

View File

@@ -2,14 +2,14 @@
  id: us_pord_03_flymoon-payment
  enabled: true
  paths:
-   - /root/logs/flymoon-payment/sys-info.log
+   - /root/logs/flymoon-payment/app.log
  fields:
    application: flymoon-payment
    log_type: payment.log
    environment: us-pord
    instance: us-prod-03
  fields_under_root: true
- multiline.pattern: '^\d{2}:\d{2}:\d{2}\.\d{3}'
+ multiline.pattern: '^\d{4}-\d{2}-\d{2}\ \d{2}:\d{2}:\d{2}\.\d{3}'
  multiline.negate: true
  multiline.match: after
  ignore_older: 24h # 忽略旧日志文件(避免处理已归档的日志)

View File

@@ -0,0 +1,66 @@
// Jenkins declarative pipeline: check out a chosen branch of lessie-react,
// rsync the workspace to a remote host, then install, build, and (re)start
// the app under pm2 on that host.
pipeline {
agent any
parameters {
// Branch/tag picker provided by the git-parameter plugin; defaults to 'release'.
gitParameter(
branchFilter: 'origin/(.*)',
defaultValue: 'release',
name: 'GIT_BRANCH',
type: 'PT_BRANCH_TAG',
selectedValue: 'DEFAULT',
sortMode: 'NONE',
description: '选择代码分支: ',
quickFilterEnabled: true,
tagFilter: '*',
listSize: "5"
)
}
environment {
// Deployment target host and project directory.
REMOTE_HOST = '192.168.70.15'
REMOTE_PROJECT_PATH = '/data/webapps/lessie-react'
}
stages {
// Check out the selected branch from the internal GitLab.
stage('Checkout 代码') {
steps {
git branch: "${params.GIT_BRANCH}", credentialsId: 'fly_gitlab_auth', url: 'http://172.24.16.20/web/lessie-react.git'
}
}
// Sync the workspace to the remote host. --delete removes stale files;
// node_modules is excluded and re-installed remotely.
stage('同步') {
steps {
sh """
ssh ${REMOTE_HOST} 'mkdir -p ${REMOTE_PROJECT_PATH}'
rsync -avz --delete --exclude='node_modules' ${WORKSPACE}/ ${REMOTE_HOST}:${REMOTE_PROJECT_PATH}/
"""
}
}
// Remotely: stop any existing pm2 app (|| true tolerates "not found"),
// install deps, build, then start via ecosystem.config.cjs and persist.
// NOTE(review): 'nvm use' assumes nvm is loaded in this non-interactive
// ssh session (e.g. via the remote shell's rc file) — confirm on the host.
stage('安装启动') {
steps {
sh """
ssh ${REMOTE_HOST} '
cd ${REMOTE_PROJECT_PATH} &&
pm2 delete lessie-react || true &&
pm2 list &&
nvm use 22.21.1 &&
npm install &&
npm run build &&
pm2 start ecosystem.config.cjs &&
pm2 save
'
"""
}
}
}
// Post-build status notifications.
post {
success {
echo '部署成功'
}
failure {
echo '部署失败,请检查日志'
}
}
}