Python / PostgreSQL 9.4 – Server Performance Data Capture V.2

In this second version, the scripts that capture data from the servers use classes. In addition, the script that captures dynamic data from the remote servers now captures additional data; specifically, it captures RAM and HD usage in raw numbers and not only in "humanized" format (the humanized fields were not taken out). As a result the relevant tables had to be modified, which means the schema has changed as well. Also, the database and the scripts are now installed via Puppet. The Puppet manifests are far from polished and need more work, but they do work.
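For reference, the difference between the raw and humanized fields is only the unit formatting. A minimal sketch using the same psutil and humanize calls the scripts rely on (the printed values are just examples):

#Illustration only: raw byte counts vs. the "humanized" strings stored alongside them
import psutil
import humanize

raw_total = psutil.virtual_memory().total    #e.g. 8246337536 (bytes)
hum_total = humanize.naturalsize(raw_total)  #e.g. '8.2 GB'
print raw_total, hum_total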

These scripts gather static and dynamic information from servers and insert that data into a PostgreSQL database. The static information is information that never changes unless a major upgrade takes place. The dynamic data is performance data of the servers. The purpose of the static data is to make it possible to query the dynamic information, which is inserted into the database every X minutes via cron.

These scripts work only on Linux/Unix based machines.

The Puppet modules used to install the database and the scripts are located here.

The static information captured for the remote servers is as follows:

hostname
iface
ipaddress
OS
OSRel
OSKern
total_M
brand
Hz
cores
arch

The dynamic information captured for the remote servers is as follows:

hostname
iface
ipaddress
total_ram_hum
used_ram_hum
total_ram_raw
used_ram_raw
used_ram_perc
total_HD_hum
used_HD_hum
total_HD_raw
used_HD_raw
used_HD_perc
cpu_use_perc
swap_used_hum
swap_total_hum
swap_perc
swap_used_raw
swap_total_raw

The static.py script needs to be run only once on the remote servers, or whenever a major upgrade changes the configuration of RAM, partitions, IP address, operating system (even an OS upgrade), CPU, or NIC.

The dynamic.py script runs on the remote servers via cron. It captures the information that changes constantly, such as memory, storage, and swap usage, and sends that data to the remote PostgreSQL database for insertion.
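For example, a crontab entry along the lines of the following would run it every five minutes (the interval and the script path are assumptions, not part of the scripts themselves):

#Hypothetical crontab entry; adjust the path and interval to your environment
*/5 * * * * /usr/bin/python /opt/scripts/dynamic.py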

In both scripts the data is collected into a dictionary, and a connection to the database is then created in order to insert it.
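As a sketch of that pattern, psycopg2 can take the dictionary directly if the INSERT uses named placeholders (the values below are examples only; the scripts as written instead build the statement with % string formatting):

#Minimal sketch: passing the dictionary to psycopg2 with named placeholders
import psycopg2

server_info = {'hostname': 'web01', 'iface': 'eth0', 'ipaddress': '10.0.0.5'}  #example values only
conn = psycopg2.connect("host='172.31.98.161' dbname='servers' user='seeker'")
cur = conn.cursor()
cur.execute("INSERT INTO servers(hostname, iface, ipaddress) VALUES (%(hostname)s, %(iface)s, %(ipaddress)s)", server_info)
conn.commit()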

The static.py script

#!/usr/bin/python
import psutil
import os
import math
import platform
import subprocess
import socket
import psycopg2
import netifaces as ni
import humanize
from cpuinfo import cpuinfo


class Static():
    def __init__(self):
        #NIC INFO
        self.hostname   = socket.gethostname()
        self.iface      = ni.interfaces()[1]
        self.ipaddress  = ni.ifaddresses(self.iface)[2][0]['addr']
        #---OS INFO
        #For Linux (RH-Debian) Operating Systems
        self.distro  = platform.dist()[0]
        self.release = platform.dist()[1]
        self.kernel  = platform.release()
        #For Windows Operating Systems
        self.osinfo_2_os    = platform.uname()[0]
        self.osinfo_2_ver   = platform.uname()[2]
        self.osinfo_2_rel   = platform.uname()[3]
        #----RAM INFO
        raw_totalM = psutil.virtual_memory().total
        self.total_M    = humanize.naturalsize(raw_totalM)
        #----CPU INFO
        self.info       = cpuinfo.get_cpu_info()
        self.brand      = self.info['brand']
        self.Hz         = self.info['hz_advertised']
        self.cores        = self.info['count']
        self.arch       = self.info['bits']

    def get_OS_make(self):
       if platform.system()  =="Linux":
           return self.distro, self.release, self.kernel
       elif platform.system()     =="Windows":
           return self.osinfo_2_os, self.osinfo_2_ver, self.osinfo_2_rel

info = Static()



hostname  = info.hostname
iface     = info.iface
ipaddress = info.ipaddress
OS        = info.get_OS_make()[0]
OSRel     = info.get_OS_make()[1]
OSKern    = info.get_OS_make()[2]
total_M   = info.total_M
brand     = info.brand
Hz        = info.Hz
cores     = info.cores
arch      = info.arch




#Create the Database PostgreSQL 9.4 connection.
conn = psycopg2.connect("host='172.31.98.161' dbname='servers' user='seeker'")
cur = conn.cursor() #Create the cursor
#Create a Dictionary to pass the value of each function.
server_info = {'hostname': hostname, 'iface':iface, 'ipaddress': ipaddress, 'OS': OS, 'OSRel': OSRel, 'OSKern': OSKern, 'total_M': total_M, 'brand': brand, 'Hz':Hz, 'cores': cores, 'arch': arch}
cur.execute("INSERT INTO servers(hostname, iface, ipaddress, OS, OSRel, OSKern, total_M, brand, Hz, cores, arch) VALUES ('%s','%s','%s','%s','%s','%s','%s','%s','%s','%s','%s')" % (hostname, iface, ipaddress, OS, OSRel, OSKern, total_M, brand, Hz, cores, arch))
#If this is not present the changes will not get committed.
conn.commit()

The dynamic.py script

#!/usr/bin/python
#THIS FILE IS MANAGED BY PUPPET
import psutil
import os
import math
import platform
import subprocess
import socket
import psycopg2
import netifaces as ni
import humanize
from cpuinfo import cpuinfo

class Dynamic():
    def __init__(self):
        #NIC INFO
        self.hostname   = socket.gethostname()
        self.iface      = ni.interfaces()[1]
        self.ipaddress  = ni.ifaddresses(self.iface)[2][0]['addr']
        #RAM USAGE INFO-------------------------------------------------------------------
        self.total_ram_hum       = humanize.naturalsize(psutil.virtual_memory().total)
        self.used_ram_hum        = humanize.naturalsize(psutil.virtual_memory().used)
        #---------Raw info
        self.total_ram_raw       = (psutil.virtual_memory().total)
        self.used_ram_raw        = (psutil.virtual_memory().used)
        self.used_ram_perc       = psutil.virtual_memory().percent
        #HD USAGE INFO-------------------------------------------------------------------
        self.total_HD_hum        = humanize.naturalsize(psutil.disk_usage('/').total)
        self.used_HD_hum         = humanize.naturalsize(psutil.disk_usage('/').used)
        #---------Raw info
        self.total_HD_raw        =(psutil.disk_usage('/').total)
        self.used_HD_raw         =(psutil.disk_usage('/').used)
        self.used_HD_perc        = psutil.disk_usage('/').percent
        #CPU USAGE INFO-------------------------------------------------------------------
        self.cpu_use_perc        = psutil.cpu_percent()
        #SWAP USAGE INFO-------------------------------------------------------------------
        self.swap_used_hum           = humanize.naturalsize(psutil.swap_memory().used)
        self.swap_total_hum          = humanize.naturalsize(psutil.swap_memory().total)
        self.swap_perc               = psutil.swap_memory()[3]
        #---------Raw info
        self.swap_used_raw           = (psutil.swap_memory().used)
        self.swap_total_raw          = (psutil.swap_memory().total)
    def export_to_csv(self):
        print self.hostname
info = Dynamic()

hostname            = info.hostname
iface               = info.iface
ipaddress           = info.ipaddress
total_ram_hum       = info.total_ram_hum
used_ram_hum        = info.used_ram_hum
total_ram_raw       = info.total_ram_raw
used_ram_raw        = info.used_ram_raw
used_ram_perc       = info.used_ram_perc
total_HD_hum        = info.total_HD_hum
used_HD_hum         = info.used_HD_hum
total_HD_raw        = info.total_HD_raw
used_HD_raw         = info.used_HD_raw
used_HD_perc        = info.used_HD_perc
cpu_use_perc        = info.cpu_use_perc
swap_used_hum       = info.swap_used_hum
swap_total_hum      = info.swap_total_hum
swap_perc           = info.swap_perc
swap_used_raw       = info.swap_used_raw
swap_total_raw      = info.swap_total_raw


conn = psycopg2.connect("host='172.31.98.161' dbname='servers' user='seeker'")
cur = conn.cursor() #Create the cursor
#Create a Dictionary to pass the value of each function.
server_info = {'hostname': hostname, 'iface': iface, 'ipaddress': ipaddress, 'total_ram_hum': total_ram_hum, 'used_ram_hum': used_ram_hum, 'total_ram_raw': total_ram_raw, 'used_ram_raw': used_ram_raw, 'used_ram_perc': used_ram_perc, 'total_HD_hum': total_HD_hum, 'used_HD_hum': used_HD_hum, 'total_HD_raw': total_HD_raw, 'used_HD_raw': used_HD_raw, 'used_HD_perc': used_HD_perc, 'cpu_use_perc': cpu_use_perc, 'swap_used_hum': swap_used_hum, 'swap_total_hum': swap_total_hum, 'swap_perc': swap_perc, 'swap_used_raw': swap_used_raw, 'swap_total_raw': swap_total_raw}
cur.execute("INSERT INTO SERVER_PERF(hostname, iface, ipaddress, total_ram_hum, used_ram_hum, total_ram_raw, used_ram_raw, used_ram_perc, total_HD_hum, used_HD_hum, total_HD_raw,used_HD_raw,used_HD_perc, cpu_use_perc,swap_used_hum, swap_total_hum, swap_perc, swap_used_raw, swap_total_raw) VALUES ('%s','%s','%s','%s','%s','%s','%s','%s','%s','%s','%s','%s','%s','%s','%s','%s','%s','%s','%s')" % (hostname, iface, ipaddress, total_ram_hum, used_ram_hum, total_ram_raw, used_ram_raw,used_ram_perc, total_HD_hum, used_HD_hum, total_HD_raw, used_HD_raw, used_HD_perc, cpu_use_perc, swap_used_hum, swap_total_hum, swap_perc, swap_used_raw, swap_total_raw))
#If this is not present the changes will not get committed.
conn.commit()

The new Database schema is as follows:

pg_dump -U seeker -d servers -s -h 172.31.98.161 > servers_db_schema

CREATE DATABASE SERVERS;
CREATE ROLE seeker WITH PASSWORD 'Password!';
ALTER DATABASE SERVERS OWNER TO seeker;
ALTER ROLE seeker WITH LOGIN;
GRANT ALL PRIVILEGES ON DATABASE SERVERS to seeker;

--
-- PostgreSQL database dump
--

SET statement_timeout = 0;
SET lock_timeout = 0;
SET client_encoding = 'UTF8';
SET standard_conforming_strings = on;
SET check_function_bodies = false;
SET client_min_messages = warning;

--
-- Name: plpgsql; Type: EXTENSION; Schema: -; Owner:
--

CREATE EXTENSION IF NOT EXISTS plpgsql WITH SCHEMA pg_catalog;

--
-- Name: EXTENSION plpgsql; Type: COMMENT; Schema: -; Owner:
--

COMMENT ON EXTENSION plpgsql IS 'PL/pgSQL procedural language';

SET search_path = public, pg_catalog;

SET default_tablespace = '';

SET default_with_oids = false;

--
-- Name: server_perf; Type: TABLE; Schema: public; Owner: seeker; Tablespace:
--

CREATE TABLE server_perf (
hostname text NOT NULL,
iface text,
ipaddress inet NOT NULL,
total_ram_hum text,
used_ram_hum text,
total_ram_raw numeric(30,2),
used_ram_raw numeric(30,2),
used_ram_perc text,
total_hd_hum text,
used_hd_hum text,
total_hd_raw numeric(30,2),
used_hd_raw numeric(30,2),
used_hd_perc text,
cpu_use_perc text,
swap_used_hum text,
swap_total_hum text,
swap_perc text,
swap_used_raw numeric(30,2),
swap_total_raw numeric(30,2),
time_captured timestamp without time zone DEFAULT now()
);

ALTER TABLE server_perf OWNER TO seeker;

--
-- Name: servers; Type: TABLE; Schema: public; Owner: seeker; Tablespace:
--

CREATE TABLE servers (
hostname text NOT NULL,
iface text,
ipaddress inet NOT NULL,
os text,
osrel text,
oskern text,
total_m text,
brand text,
hz text,
cores numeric(4,1),
arch text
);

ALTER TABLE servers OWNER TO seeker;

--
-- Name: pk_hostname; Type: CONSTRAINT; Schema: public; Owner: seeker; Tablespace:
--

ALTER TABLE ONLY servers
ADD CONSTRAINT pk_hostname PRIMARY KEY (hostname);

--
-- Name: server_perf_hostname_fkey; Type: FK CONSTRAINT; Schema: public; Owner: seeker
--

ALTER TABLE ONLY server_perf
ADD CONSTRAINT server_perf_hostname_fkey FOREIGN KEY (hostname) REFERENCES servers(hostname);

--
-- Name: public; Type: ACL; Schema: -; Owner: postgres
--

REVOKE ALL ON SCHEMA public FROM PUBLIC;
REVOKE ALL ON SCHEMA public FROM postgres;
GRANT ALL ON SCHEMA public TO postgres;
GRANT ALL ON SCHEMA public TO PUBLIC;

--
-- PostgreSQL database dump complete
--
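With this schema in place, the static and dynamic tables can be joined to answer questions about the servers. A sketch of such a query, listing the most recently captured performance rows together with each server's OS and CPU:

SELECT s.hostname, s.os, s.brand, p.used_ram_perc, p.used_hd_perc, p.cpu_use_perc, p.time_captured
FROM servers s
JOIN server_perf p ON p.hostname = s.hostname
ORDER BY p.time_captured DESC
LIMIT 20;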

Python – RPM and Python Module check and auto installer

The script below does a series of checks for the RPM packages needed for pip to work. It then checks for certain Python modules. If any of the packages are not installed, the script will automatically download and install them.

import subprocess

def module_checks():
# This module checks if the required modules for Python to work with Postgres are installed.
# It first checks if the gcc and pip tools are installed, which allows the Python modules to be installed.
     try:

        if subprocess.call(["which", "gcc"]) == 0:
            print "PIP INSTALLED"

        elif subprocess.call(["which", "gcc"]) == 1:

            try:
                subprocess.call(["yum","install", "-y", "gcc"])

            except Exception as e:
                print e

     except Exception as e:
        print e

     try:

        if subprocess.call(["which", "pip"]) == 0:
            print "PIP INSTALLED"

        elif subprocess.call(["which", "pip"]) == 1:

            try:

                subprocess.call(["yum", "-y", "install", "python-devel"])
                subprocess.call(["easy_install", "pip"])

            except Exception as e:
                print e

     except:

        subprocess.call(["yum", "-y", "install", "python-devel"])
        subprocess.call(["easy_install", "pip"])

     try:

        import netifaces

     except ImportError:

        subprocess.call(["pip", "install", "netifaces"])

     try:

        import psycopg2

     except ImportError:

        try:

            subprocess.call(["yum", "install", "-y", "python-psycopg2"])

        except Exception as e:

            print e

Python – CentOS 6 – PostgreSQL 9.4 – Check if PostgreSQL is installed. If not, install it and set up a database

This script checks if PostgreSQL is installed. If it is not, it checks whether the PostgreSQL YUM RPM packages are present and then proceeds to install PostgreSQL. It then performs the post-install configuration steps. It can also create a database with a user assigned to it.

#!/usr/bin/python
# -*- coding: utf-8 -*-
import psutil
import os
import subprocess
import psycopg2


def install_PostgresSQL():
    try:

        if subprocess.call(["rpm", "-q", "pgdg-centos94"]) == 0:
            print "POSTGRES-SQL IS ALREADY INSTALLED"

        elif subprocess.call(["rpm", "-q", "pgdg-centos94"]) == 1:
            subprocess.call(["yum", "localinstall", "-y", "http://yum.postgresql.org/9.4/redhat/rhel-6-x86_64/pgdg-centos94-9.4-1.noarch.rpm"])
            subprocess.call(["yum", "install", "-y", "postgresql94-contrib", "postgresql94-server"])
            print "#################################################################"
            post_install_PostgreSQL()

    except Exception as e:
        print e
        print "Install failed. Exiting with Error"


def post_install_PostgreSQL():
    try:
        path = "/etc/sysconfig/pgsql/"
        fname = 'postgresql-9.4'
        ##os.chdir(path)
        full_path = os.path.join(path, fname)
        file  = open(full_path, "w")
        file.write('PGDATA=/home/postgres94/pgsql/data94\nPGLOG=/home/postgres94/pgsql/pgstartup94.log')
        file.close()
        subprocess.call(["/usr/pgsql-9.4/bin/postgresql94-setup", "initdb"])
        subprocess.call(["systemctl", "enable", "postgresql-9.4"])
        subprocess.call(["systemctl", "start", "postgresql-9.4"])
        create_Database()

    except Exception as e:
        print e
        print "Error in Post Install steps"



def create_Database():
    try:
        conn = psycopg2.connect("host='10.0.0.41' dbname='postgres' user='postgres'")
        cur = conn.cursor() #Create the cursor
        #cur.execute("""SELECT * from servers""")
        conn.autocommit = True
        cur.execute("""CREATE DATABASE test""")
        cur.execute("""CREATE ROLE george WITH PASSWORD 'YourPasswordHere'""")
        cur.execute("""GRANT ALL PRIVILEGES ON DATABASE test to george""")
        #If this is not present the changes will not get commited.
        conn.commit()
        #for row in cur: #Enable the following two lines to view query results
        #    print rows
    except Exception as e:
        print e



install_PostgresSQL()
create_Database()

Python: Check IPs for DNS entries and see if host is UP or DOWN, check SSH login. Export info to a CSV file

This script is a simple revision of the DNS_HOST_CHECK.py script. The difference is that this script, in addition to the previous checks, also attempts an SSH login and reports back whether it failed or not.

import socket
import subprocess
import netifaces
import csv
import paramiko
 
def checkPING(ip):
    try:
        ping = subprocess.check_output(['ping', '-c1', ip])
        return "Host is UP"
    except:
        return "Host is DOWN"
 
def checkDNS(ip):
    try:
        dns = socket.gethostbyaddr(ip)
        return dns[0]
    except:
        return "No DNS entry found"
 
def checkSSH(ip):
    try:
        ssh = paramiko.SSHClient()
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        ssh.connect(ip, username='root', password='Password!')
        ssh.close()
        return "SSH OK"
    except:
        return "SSH NO"
 
ip_list = []
dns_list = []
status_list = []
ssh_list = []
 
csvfile= open('file.csv', 'w')
for loop_ip in range (30):
    ip = '10.0.0.%d' % loop_ip
    print ip, checkDNS(ip), checkPING(ip),checkSSH(ip)
    data = ip+" ",checkDNS(ip)+" ",checkPING(ip)+" ",checkSSH(ip)
    #data_list.append(data)
    ip_list.append(ip)
    dns_list.append(checkDNS(ip))
    status_list.append(checkPING(ip))
    ssh_list.append(checkSSH(ip))
#print data_list
writer = csv.writer(csvfile, dialect='excel')
writer.writerows(zip(ip_list, dns_list, status_list, ssh_list))


Python – Parse URLs from files in a directory to check if links are up/down before executing yum on CentOS.

This script parses the files in the yum repo directory and checks that all URLs are up before proceeding to execute yum. It creates two lists to do a sanity check: one tracks the repo files in which URLs were found, and the other tracks the links that responded successfully. If the two counts do not match, the execution of yum is aborted. Please note that this script will work only with custom-made yum repos.

import os
import subprocess
import re
import urllib2
import os.path


def check_Software():
    path = '/usr/bin/gcc'
    if os.path.isfile(path):
        pass
    else:
        try:
            subprocess.call(["yum", "install", "-y", "gcc"])
            if not os.path.isfile(path):
                print "Sending email"

        except OSError as er:
            print er

def yum_link_Search():
    #These two lists are used as a control. The status list is used to append successful HTTP connections
    #for each of the URLs captured from the files in the YUM directory. The files themselves are stored in the files_list list.
    #The number of elements in each is compared. A success condition is when each site returns an alive code of
    #200. This tells us that all sites are up and running. View lines between 52 and 60
    status_list = []
    files_list = []
    src_dict = ("/etc/yum.repos.d/") #Specify the base directory
    pattern = re.compile('http\S+') #Pattern to search for

    for yum_files in os.listdir(src_dict): # obtain list of files in directory
        files = os.path.join(src_dict, yum_files) #join the full path with the names of the files.
        files_list.append(yum_files)
        strng = open(files) #We need to open the files
        for lines in strng.readlines(): #We then need to read the files
            if re.search(pattern, lines): #If we find the pattern we are looking for
                print re.split(r'=', lines)[1] #We split using the = sign as a delimiter.
                try:

                    status_link = urllib2.urlopen(re.split(r'=', lines)[1]).getcode()
                    #if  status_link == 200: #Check if the site is up
                    status_list.append(status_link)#View notes starting on line 29 for these conditionals
                    print status_list
                    print files
                    if len(files_list) > len(status_list): #View notes starting on line 29 for these conditionals
                        print "Not all Links are up - ABORTING"
                        exit();
                    if len(files_list) < len(status_list):
                        print "Not all Links are up - ABORTING"
                        exit();
                    elif len(files_list) == len(status_list):
                        print "Will run yum installer" #here we can call other functions since everything seems to be working fine.
                        #call the yum function here
                except StandardError as er:#Here we are capturing errors of other codes.
                    print "Site down"
                    print status_link



yum_link_Search()

Python: Check IPs for DNS entries and see if host is UP or DOWN. Export info to a CSV file

Checks whether IPs have a DNS entry. If they do, it checks whether the host is up. If an IP does not have a DNS entry, it reports that. It does not report whether an IP without a DNS entry is up or down.

import socket
import subprocess
import  netifaces
import csv
 
def checkPING(ip):
    try:
        ping = subprocess.check_output(['ping', '-c1', ip])
        return "Host is UP"
    except:
        return "Host is DOWN"
 
def checkDNS(ip):
    try:
        dns = socket.gethostbyaddr(ip)
        return dns[0]
    except:
        return "No DNS entry found"
 
 
ip_list = []
dns_list = []
status_list = []
 
csvfile= open('file.csv', 'w')
for loop_ip in range (62):
    ip = '10.0.0.%d' % loop_ip
    print ip, checkDNS(ip), checkPING(ip)
    #data = ip+" ",checkDNS(ip)+" ",checkPING(ip)
    #data_list.append(data)
    ip_list.append(ip)
    dns_list.append(checkDNS(ip))
    status_list.append(checkPING(ip))
writer = csv.writer(csvfile, dialect='excel')
writer.writerows(zip(ip_list, dns_list, status_list))
 
 
'''
#These lines will print each element of the list on a row
writer.writerow(ip_list)
writer.writerow(dns_list)
writer.writerow(status_list)
'''
 
'''
#Also possible threading implementation
for threads_1 in range(1):
    threads_ping.append(threading.Thread(target=executeCheck()))
    threads_ping[-1].start


for threads_2 in range(num_dns):
    threads_dns = append(threading.Thread(target=checkDNS))
    threads_dns.start()

for threads in threads_ping:
    threads.join()

for threads in threads_dns:
    threads.join()
'''
 
OUTPUT:
10.0.0.5 No DNS entry found Host is UP
10.0.0.6 No DNS entry found Host is DOWN
10.0.0.7 No DNS entry found Host is DOWN

Python / PostgreSQL 9.4 – Server Performance Data Capture V.1

These scripts gather static and dynamic information from servers and insert that data into a PostgreSQL database. The static information is information that never changes unless a major upgrade takes place. The dynamic data is performance data of the servers. The purpose of the static data is to make it possible to query the dynamic information, which is inserted into the database every X minutes via cron.

These scripts work only on Linux/Unix based machines.

The static information captured for the remote servers is as follows:

  • Hostname
  • IP
  • Operating System
  • Operating System Version
  • Total Amount of RAM
  • Total Amount of HD space
  • CPU Type
  • MAC ADDRESS

The dynamic information captured for the remote servers is as follows:

  • Memory Usage
  • Storage Usage
  • Swap Usage
  • Time the data was captured

The static.py script needs to be run only once on the remote servers, or whenever a major upgrade changes the configuration of RAM, partitions, IP address, operating system (even an OS upgrade), CPU, or NIC.

The dynamic.py script runs on the remote servers via cron. It captures the information that changes constantly, such as memory, storage, and swap usage, and sends that data to the remote PostgreSQL database for insertion.

In both scripts the data is collected into a dictionary, and a connection to the database is then created in order to insert it.

The install.py script is not required to be run, but it does several things:

  • Installs the PostgreSQL 9.4 database.
  • Creates the database and the role assigned to the database.
  • Creates the tables needed.
  • Configures the postgresql.conf file to listen on a specific IP address and port.

However, if you wish to set up the database manually, the SQL script is provided at the bottom of this page. The script is called server_perf.sql.
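For the manual route, one way (a sketch; the host is the one install.py uses, and the steps follow the per-user comments inside the SQL file) is to open a psql session as the postgres superuser, run the role/database block, then connect to the new servers database with \c servers and run the table block:

#Hypothetical manual setup session; run the blocks from server_perf.sql as described in its comments
psql -U postgres -h 10.0.0.27 -d postgres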

The static.py script

#!/usr/bin/python
import psutil
import os
import math
import platform
import subprocess
import socket
import psycopg2
import netifaces as ni

from decimal import *

#Used by the convert values function. It is needed to provide human readable data for the RAM amount produced.
factor = 1024

def Host_name():
    #Get the hostname
    hostname = socket.gethostname()
    return hostname

def IP_address():
    #Get the IP for ETH0
    ni.ifaddresses('eth0')
    ip = ni.ifaddresses('eth0')[2][0]['addr']
    return ip

def OS_make():
    #OS INFORMATION. Provides the OS make (Windows, Ubuntu, RH, CentOS, etc.)
    #cpu_version = platform.dist()[0]+" "+platform.dist()[1]
    cpu_version = platform.dist()[0]
    return  cpu_version

def OS_Version():
    #Provides the version of the OS.
    os_version = platform.dist()[1]
    return os_version

def Virtual_memory_total():
    #Provides total amount of RAM
    cvr_info = psutil.virtual_memory().total
    return round(cvr_info, 2)

def convert_values():
    #Converts the raw byte count into a human-readable size (e.g. GB) and presents it as such.
    cvr_info = Virtual_memory_total()
    i = int(math.log(cvr_info)/math.log(factor))
    result = float(cvr_info)/factor**i
    return result

def get_processor_info():
    #Gets the verbose info about the CPU (AMD FX(tm)-6350)
    if platform.system() == "Windows":
        return platform.processor()
    elif platform.system() == "Darwin":
        return subprocess.check_output(['/usr/sbin/sysctl', "-n", "machdep.cpu.brand_string"]).strip()
    elif platform.system() == "Linux":
        command = "cat /proc/cpuinfo | grep 'model name' | head -1 | awk -F: '{print $2}'"
        return subprocess.check_output(command, shell=True).strip()
    return ""

#Adjusting precision and then appending to list information taken from Virtual_memory_total function
mem_frmt =   "{0:.2f}".format(convert_values())

#Create the Database PostgreSQL 9.4 connection.
conn = psycopg2.connect("host='10.0.0.41' dbname='serverinfo' user='gmastrokostas'")
cur = conn.cursor() #Create the cursor
#Create a Dictionary to pass the value of each function.
server_info = {'hostname': Host_name(), 'IP': IP_address(), 'OS':OS_make(), 'OS_V':OS_Version(), 'RAM':mem_frmt, 'CPU':get_processor_info()}
cur.execute("INSERT INTO servers(hostname, IP, OS, OS_Version, RAM, CPU) VALUES('%s','%s','%s','%s','%s','%s')" % (Host_name(),IP_address(), OS_make(), OS_Version(), mem_frmt, get_processor_info() ))

#If this is not present the changes will not get commited.
conn.commit()

The dynamic.py script

import psutil
import os
import math
import platform
import subprocess
import socket
import psycopg2
import netifaces as ni
 
def Host_name():
    #Get the hostname
    hostname = socket.gethostname()
    return hostname
 
def IP_address():
    #Get the IP for ETH0
    ni.ifaddresses('eth0')
    ip = ni.ifaddresses('eth0')[2][0]['addr']
    return ip
 
factor = 1024
def Virtual_memory_use():
    #Provides the amount of RAM currently in use
    cvr_info = psutil.virtual_memory().used
    #cvr_info_ram = Virtual_memory_use()
    i = int(math.log(cvr_info)/math.log(factor))
    result_ram = float(cvr_info)/factor**i
    return round(result_ram)
memory_used_frmt =   "{0:.2f}".format(Virtual_memory_use())
 
def Partition_root_Usedspace():
    #Provides the amount of used space on the root (/) partition
    cvr_info = psutil.disk_usage('/').used
    #cvr_info_ram = Virtual_memory_use()
    i = int(math.log(cvr_info)/math.log(factor))
    result_ram = float(cvr_info)/factor**i
    return round(result_ram)
hd_used_frmt =   "{0:.2f}".format(Partition_root_Usedspace())
 
 
def CPU_general_usage():
    #CPU INFORMATION.
    cpu_prec_usage = psutil.cpu_percent(interval=0)
    return cpu_prec_usage
 
def Swap_memory_usage():
    #SWAP SIZE USAGE
    swap_mem_full_num = psutil.swap_memory().used
    return swap_mem_full_num
 
 
conn = psycopg2.connect("host='10.0.0.41' dbname='serverinfo' user='gmastrokostas'")
cur = conn.cursor() #Create the cursor
#Create a Dictionary to pass the value of each function.
server_info = {'hostname': Host_name(), 'IP': IP_address(),'MEM_USE':memory_used_frmt, 'HD_USE':hd_used_frmt, 'CPU_USE':CPU_general_usage(), 'SWP_USE':Swap_memory_usage()}
cur.execute("INSERT INTO SERVER_PERF(hostname, IP, MEM_USE, HD_USE, CPU_USE, SWP_USE) VALUES('%s','%s','%s','%s','%s','%s')" % (Host_name(),IP_address(), memory_used_frmt, hd_used_frmt, CPU_general_usage(), Swap_memory_usage()  ))
 
#If this is not present the changes will not get commited.
conn.commit()

The install.py script

#!/usr/bin/python
import subprocess
import os
import pwd
import grp
from pwd import getpwnam
import psycopg2
import re
import time

def install_PostgresSQL():
#This function checks if the Postgres RPM package is installed or not.
#If not, it will download the RPM package and then install the packages needed
#for the PostgreSQL database.
    try:

        if subprocess.call(["rpm", "-q", "pgdg-centos94"]) == 0:
            print "POSTGRES-SQL IS ALREADY INSTALLED"

        elif subprocess.call(["rpm", "-q", "pgdg-centos94"]) == 1:
            subprocess.call(["yum", "localinstall", "-y", "http://yum.postgresql.org/9.4/redhat/rhel-7-x86_64/pgdg-centos94-9.4-1.noarch.rpm"])
            subprocess.call(["yum", "install", "-y", "postgresql94-contrib", "postgresql94-server"])
            print "#################################################################"
            post_install_PostgreSQL_create_postgres_Directories()

    except Exception as e:

        print e
        print "Install of RPM Packages failed. Exiting with Error at function Install_PostgresSQL"
        exit(0)

def post_install_PostgreSQL_create_postgres_Directories():
#This function will create the directories from where the database will run.
#It will also adjust the permissions of these directories so they will be owned by user postgres.
#It will also initialize the database in the directory created and also start the database.
    try:
        #Setup directories and set permissions
        parent_dir = '/home/postgres'
        directory = '/home/postgres/postgres94'
        postgres_user = pwd.getpwnam('postgres')[2]
        postgres_group = grp.getgrnam('postgres')[2]

        #Check if the postgres home directory exists. If it doesn't create it. If it does then exit.
        if not os.path.exists(directory):
            os.makedirs(directory)
            os.chown(parent_dir, postgres_user, postgres_group)
        else:
            exit(0)
        #Change Recursively all permissions of the /home/postgres directory to be owned by user postgres
        for dirpath, dirname, filenames in os.walk(parent_dir):

            for loop_dir in dirname:
                path = os.path.join(dirpath, loop_dir)
                os.chown(path, postgres_user, postgres_group)

        post_install_PostgreSQL_start_database()

    except Exception as e:

        print e
        print "Error in Post Install steps. Exiting with Error at function post_install_PostgreSQL_create_postgres_Directories "

def post_install_PostgreSQL_start_database():
    try:
        os.system("su postgres -c '/usr/pgsql-9.4/bin/initdb -D /home/postgres/postgres94'")
        #os.system("su postgres -c '/usr/pgsql-9.4/bin/pg_ctl -D /home/postgres/postgres94 -l /home/postgres/postgres94/logfile start'")
        print "INITDB DATABASE"
        post_install_PostgreSQL_configure_postgres_files()

    except  Exception as e:
        print e
        print "Exiting with error at function post_install_PostgreSQL_start_database"

def post_install_PostgreSQL_configure_postgres_files():

    try:
        ##Configure the listening address and the port on the postgres.conf file
        text_ip = "#listen_addresses = 'localhost'"
        subtext_ip = "listen_addresses = '10.0.0.27'"
        text_port = "#port = 5432"
        subtext_port = "port = 5432"

        s = open("/home/postgres/postgres94/postgresql.conf").read()
        s = s.replace(text_ip, subtext_ip)
        s = s.replace(text_port, subtext_port)
        f = open("/home/postgres/postgres94/postgresql.conf", 'w')
        f.write(s)
        f.close()


        s = open("/home/postgres/postgres94/postgresql.conf").read()
        s = s.replace(text_port, subtext_port)
        f = open("/home/postgres/postgres94/postgresql.conf", 'w')
        f.write(s)
        f.close()
        ###########END#########################

        #Edit the pg_hba.conf file to setup from whom the server will accept connections from
        list = ["host    all             all             10.0.0.0/26             trust", "host    all             all             127.0.0.1/32            trust"]

        f = open('/home/postgres/postgres94/pg_hba.conf', "a")
        for items in list:
            f.write(items+'\n')
        f.close()
        #########END#########################

        time.sleep(30)

    except Exception as e:
        print e
        print "Exiting with error at function post_install_PostgreSQL_configure_postgres_files"



def create_Database():

    try:
        os.system("su postgres -c '/usr/pgsql-9.4/bin/pg_ctl -m fast -D /home/postgres/postgres94  -l /home/postgres/postgres94/logfile start'")
        print "STARTING DATABASE FOR THE FIRST TIME"
        time.sleep(60)
        conn = psycopg2.connect("host='10.0.0.27' dbname='postgres' user='postgres'")
        cur = conn.cursor() #Create the cursor
        conn.autocommit = True
        cur.execute("""CREATE DATABASE SERVERS""")
        cur.execute("""CREATE ROLE serverseek WITH PASSWORD 'Password!'""")
        cur.execute("""ALTER DATABASE SERVERS OWNER TO SERVERSEEK""")
        cur.execute("""ALTER ROLE SERVERSEEK WITH LOGIN""")
        cur.execute("""GRANT ALL PRIVILEGES ON DATABASE SERVERS to serverseek""")
        conn.commit()
        conn.close()

        conn = psycopg2.connect("host='10.0.0.27' dbname='servers' user='serverseek'")
        cur = conn.cursor() #Create the cursor
        cur.execute("CREATE TABLE SERVERS( HOSTNAME        TEXT NOT NULL, IP              INET NOT NULL, OS              TEXT, OS_VERSION              TEXT, RAM             NUMERIC(10,2), HD_TOTAL_SPACE  NUMERIC(10,2), CPU             TEXT, MAC             MACADDR, TIME_CAPTURED   TIMESTAMP DEFAULT CURRENT_TIMESTAMP, CONSTRAINT PK_HOSTNAME PRIMARY KEY (HOSTNAME));")
        cur.execute("CREATE TABLE SERVER_PERF( HOSTNAME        TEXT NOT NULL REFERENCES SERVERS (HOSTNAME), IP              INET NOT NULL, MEM_USE         NUMERIC(10,2), HD_USE          NUMERIC(10,2), CPU_USE         NUMERIC(10,2), SWP_USE         NUMERIC(10,2), TIME_CAPTURED   TIMESTAMP DEFAULT CURRENT_TIMESTAMP);")
        #If this is not present the changes will not get commited.
        conn.commit()
        conn.close()
        #for row in cur: #Enable the following two lines to view query results
        #    print rows
    except Exception as e:
        print e
        print "Exiting with error at function create_Database"

########################
install_PostgresSQL()
########################
create_Database()

The server_perf.sql script is the SQL script needed to create the role, database, and tables in the PostgreSQL database. IMPORTANT: If you run the install.py script you do not need to run this SQL script; install.py does this for you.

-- The following SQL script creates the role and database and adjusts ownership of the new database.
-- As user postgres run the following:
CREATE DATABASE SERVERS;
CREATE ROLE serverseek WITH PASSWORD 'YourPassword';
ALTER DATABASE SERVERS OWNER TO SERVERSEEK;
ALTER ROLE SERVERSEEK WITH LOGIN;
GRANT ALL PRIVILEGES ON DATABASE SERVERS to SERVERSEEK;

-- The following SQL script creates the tables needed for the script to insert the data it captures.
-- As user SERVERSEEK run the following:
CREATE TABLE SERVERS
(
HOSTNAME        TEXT NOT NULL,
IP              INET NOT NULL,
OS              TEXT,
OS_VERSION              TEXT,
RAM             NUMERIC(10,2),
HD_TOTAL_SPACE  NUMERIC(10,2),
CPU             TEXT,
MAC             MACADDR,
TIME_CAPTURED   TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT PK_HOSTNAME PRIMARY KEY (HOSTNAME)
);
ALTER TABLE SERVERS
        OWNER TO SERVERSEEK;

CREATE TABLE SERVER_PERF
(
HOSTNAME        TEXT NOT NULL REFERENCES SERVERS (HOSTNAME),
IP              INET NOT NULL,
MEM_USE         NUMERIC(10,2),
HD_USE          NUMERIC(10,2),
CPU_USE         NUMERIC(10,2),
SWP_USE         NUMERIC(10,2),
TIME_CAPTURED   TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
ALTER TABLE SERVER_PERF
        OWNER TO SERVERSEEK;



——————————————————————————————————
MODIFIED VERSIONS OF STATIC AND DYNAMIC PYTHON SCRIPTS. MEANT TO BE DEPLOYED ON VERSION 2 OF THE OVERALL PACKAGE
——————————————————————————————————
STATIC.PY


#!/usr/bin/python
import psutil
import os
import math
import platform
import subprocess
import socket
#import psycopg2
import netifaces as ni
import humanize
from cpuinfo import cpuinfo


class Static():
    def __init__(self):
        #NIC INFO
        self.hostname   = socket.gethostname()
        self.iface      = ni.interfaces()[1]
        self.ipaddress  = ni.ifaddresses(self.iface)[2][0]['addr']
        #---OS INFO
        self.osinfo_1   = platform.dist()

        self.osinfo_2_os    = platform.uname()[0]
        self.osinfo_2_ver   = platform.uname()[1]
        self.osinfo_2_rel   = platform.uname()[2]
        #----RAM INFO
        raw_totalM = psutil.virtual_memory().total
        self.total_M    = humanize.naturalsize(raw_totalM)
        #----CPU INFO
        self.info       = cpuinfo.get_cpu_info()
        self.brand      = self.info['brand']
        self.Mhz        = self.info['hz_advertised']
        self.arch       = self.info['bits']

    def get_OS_make(self):
       if platform.system()     =="Windows":
           return self.osinfo_2_os, self.osinfo_2_ver, self.osinfo_2_rel
       elif platform.system()  =="Linux":
           return self.osinfo_1

host = Static()
'''
print host.hostname
print host.iface
print host.ipaddress
print host.osinfo_1
print host.osinfo_2_os
print host.osinfo_2_ver
print host.osinfo_2_rel
print host.total_M
print host.brand
print host.Mhz
print host.arch
'''
print host.get_OS_make()

DYNAMIC.PY

#!/usr/bin/python
#THIS FILE IS MANAGED BY PUPPET
import psutil
import os
import math
import platform
import subprocess
import socket
#import psycopg2
import netifaces as ni
import humanize
from cpuinfo import cpuinfo

class Dynamic():
    def __init__(self):
        #NIC INFO
        self.hostname   = socket.gethostname()
        self.iface      = ni.interfaces()[1]
        self.ipaddress  = ni.ifaddresses(self.iface)[2][0]['addr']
        #RAM USAGE INFO
        self.total_ram_raw       = humanize.naturalsize(psutil.virtual_memory().total)
        self.used_ram_raw        = humanize.naturalsize(psutil.virtual_memory().used)
        self.used_ram_perc       = psutil.virtual_memory().percent
        #HD USAGE INFO
        self.total_HD_raw        = humanize.naturalsize(psutil.disk_usage('/').total)
        self.used_HD_raw         = humanize.naturalsize(psutil.disk_usage('/').used)
        self.used_HD_perc        = psutil.disk_usage('/').percent
        #CPU USAGE INFO
        self.cpu_use_perc        = psutil.cpu_percent()
        #SWAP USAGE INFO
        self.swap_used           = humanize.naturalsize(psutil.swap_memory().used)
        self.swap_perc           = psutil.swap_memory()[3]
        self.swap_total          = humanize.naturalsize(psutil.swap_memory().total)
    def export_to_csv(self):
        print self.hostname


info = Dynamic()
