#!/usr/bin/python
# -*- coding: utf-8 -*-
from fabric.api import *
from fabric.context_managers import *
from fabric.contrib.console import confirm
from fabric.contrib.files import *
from fabric.contrib.project import rsync_project
import time,os

# Deploy hosts (Fabric "user@host:port" host strings).
a = 'root@192.168.0.221:22'
b = 'root@192.168.0.227:22'
c = 'root@192.168.0.228:22'

# SSH password per host string.
# NOTE(review): the original dict also listed an undefined host `g`, and
# roledefs referenced undefined `d`, `e`, `f`, `g` — that raised NameError
# as soon as the fabfile was imported. Only the defined hosts are kept.
env.passwords = {
    a: '123',
    b: '123',
    c: '123',
}

env.roledefs = {
  'b': [b],
  # TODO(review): role 'f' is the ELK server target (see deployServer); it
  # originally pointed at an undefined host `f` — confirm the real host.
  'f': [c],
  'all': [a, b, c],
}

# Source dir on the control machine / destination dir on the clients.
env.sdir = "/data/soft/soft/"
env.ddir = "/opt/ceshi/"

@task
@roles('all')
def isupervisord():
  """Install supervisor (via EPEL) on every host and push our supervisord.conf."""
  # Only install when /etc/supervisord.conf is absent: fetch the Aliyun EPEL
  # repo file, drop the mirrors.aliyuncs.com lines, then yum-install.
  run('[ -e "/etc/supervisord.conf" ] || (wget -O /etc/yum.repos.d/epel.repo http://mirrors.aliyun.com/repo/epel-7.repo; cd /etc/yum.repos.d;sed -i "/mirrors.aliyuncs.com/d" epel.repo; yum install -y supervisor --enablerepo=epel)')
  # Always overwrite the remote config with our canonical copy.
  conf_src = env.sdir + "supervisord/supervisord.conf"
  put(conf_src, "/etc/supervisord.conf")

@task
@roles('all')
def ifilebeat():
  """Deploy the unpacked filebeat 5.1.1 tree to /opt/ceshi and make the binary executable."""
  # mkdir -p is already idempotent; the original `[ -d ... ] ||` guard was redundant.
  run("mkdir -p /opt/ceshi")
  # Upload the already-unpacked distribution directory.
  put(env.sdir + "elk/filebeat-5.1.1-linux-x86_64", "/opt/ceshi/")
  # -sfn replaces an existing symlink, so the task is safe to re-run
  # (plain `ln -s` aborts with "File exists" on the second run).
  run("cd /opt/ceshi && ln -sfn filebeat-5.1.1-linux-x86_64 f")
  run("cd /opt/ceshi/f && chmod a+x filebeat")


@task
@roles('all')
def irun():
  """Push the conf.f1.py probe script to every host and execute it.

  The script generates the filebeat and supervisor configuration remotely.
  """
  script = os.path.join(env.sdir, "elk/f/conf.f1.py")
  put(script, "/opt/ceshi/")
  run("chmod a+x /opt/ceshi/conf.f1.py; /opt/ceshi/conf.f1.py")

@task
@roles('all')
def restartSupervisord():
  """Re-push supervisord.conf, ensure filebeat is executable, then restart and enable supervisord."""
  conf_src = env.sdir + "supervisord/supervisord.conf"
  put(conf_src, "/etc/supervisord.conf")
  run("cd /opt/ceshi/f && chmod a+x filebeat")
  # Restart first so the new config takes effect, then enable at boot.
  for action in ('restart', 'enable'):
    run('systemctl %s supervisord' % action)

@task
@roles('f')
def deployServer():
  """Deploy elasticsearch and kibana onto the ELK server host (role 'f')."""
  run("mkdir -p /opt/ceshi/{e,k}")
  # Use env.sdir (== "/data/soft/soft/") for consistency with the other
  # tasks; the path was previously hard-coded here.
  put(env.sdir + "elk/e/elasticsearch-5.1.1", "/opt/ceshi/e")
  put(env.sdir + "elk/k", "/opt/ceshi/k")

@task
@roles('all')
def testFileBeat():
  """Push and run the test.f1.py script on every host.

  Per the notes below, filebeat only picks up files once they receive new
  data; the script writes newline-terminated test records so quiet logs
  start shipping.
  """
  # Use env.sdir (== "/data/soft/soft/") for consistency with the other
  # tasks; the path was previously hard-coded here.
  put(env.sdir + "elk/f/test.f1.py", "/opt/ceshi/")
  run("chmod a+x /opt/ceshi/test.f1.py; /opt/ceshi/test.f1.py")

首先,我们的日志统一放在了/data/logs下面,现在上elk对日志进行上报,基本逻辑是:

  • 登录每一台服务器
  • 进入/data/logs目录,查看下面的文件夹
  • 将上一步得到的结果进行filebeat配置编写
  • 针对上一步的配置,编写 supervisor 启动配置,以后通过 web 界面就可以管理启动了

脚本中使用的函数总结:

函数 作用 其他
isupervisord 在客户端安装supervisor软件
ifilebeat 将filebeat安装到指定目录,并给相应binanry权限
irun 运行conf.f1.py 探测脚本, 把filebeat配置和supervisor配置 搞好
restartSupervisord 就是重新启动进程的啦
deployServer 这个是部署elasticsearch和kibana的服务端
testFileBeat 发现filebeat默认只对有新数据写入的文件感兴趣,导致一些没有新数据的目录日志上报不上来,起初还以为是配置错误,所以写了这个函数来测试。注意 python 在 write 的时候,字符串后面要有 \n 这种换行符,否则 filebeat 收不到数据
Copyright © opschina.org 2017 with zzlyzq@gmail.com, all rights reserved. Powered by Gitbook. 该文件修订时间: 2017-07-11 11:32:48

results matching ""

    No results matching ""