• Welcome to Overclockers Forums! Join us to reply in threads, receive reduced ads, and to customize your site experience!

/bin

Overclockers is supported by our readers. When you click a link to make a purchase, we may earn a commission. Learn More.
logwatch email parser

This is a very dirty hack job on the GUI. I built a CLI version of this, which was much cleaner programmatically.

At any rate it works for what I need it to do

Main program

Code:
#!/usr/bin/python
#This program provides a graphical front end for some basic parsing of logwatch emails exported from
#Outlook/email clients
import pygtk
import gtk
import gobject
import gtk.glade

#When pulling files in from glade you need to refer to the objects as follows
# local_object_name = self.builder.get_object("name_in_glade")

# NOTE(review): this module-level list is immediately shadowed by a local
# `server = []` inside generate_server_list(); it is kept only so any
# external code that imported it keeps working.
server = []


class startUI:
    """Graphical front end for parsing logwatch report emails.

    Loads ``selection_screen.glade``, lets the user pick an exported
    logwatch email plus the hosts and services to report on, and shows
    the matching report sections in a second window (``report.glade``).

    The parsed sections are kept in module-global ``*_msg`` lists of
    ``(server_name, label, text)`` tuples, one entry per host per
    service, so the list index of a host in the server list matches the
    index of its entry in every ``*_msg`` list.
    """

    # Kept for compatibility; nothing in this file reads it.
    sort_order = gtk.SORT_ASCENDING

    def connect_signals(self):
        """Wire the main-window widgets to their handler methods."""
        self.button_exit = self.builder.get_object("button_exit")
        self.button_report = self.builder.get_object("button_report")
        self.select_file = self.builder.get_object("select_file")
        self.button_report.connect("clicked", self.report_button_click)
        self.button_exit.connect("clicked", self.exit_program)
        self.select_file.connect("file-set", self.generate_server_list)

    def generate_server_list(self, widget, callback_data=None):
        """Parse the selected logwatch email and populate both selection lists.

        Fired on the file chooser's "file-set" signal.  Walks the email
        line by line, collecting each service section per host into the
        module-global ``*_msg`` lists and inserting a "No <service>"
        placeholder when a host's report lacks that section, so indexes
        stay aligned with the server list.
        """
        # Globals so get_user_selections() can read the parsed results.
        global samba_msg
        global httpd_msg
        global clam_msg
        global pam_msg
        global SSHD_msg
        global iptables_msg
        global disk_space_msg

        # One (server_name, label, text) tuple per host per service.
        samba_msg = []
        httpd_msg = []
        clam_msg = []
        pam_msg = []
        SSHD_msg = []
        iptables_msg = []
        disk_space_msg = []
        server = []

        # "Currently inside this section" markers (0/1 toggled by the
        # "-- <service>" Begin/End banner lines).
        samba_flag = 0
        httpd_flag = 0
        clam_flag = 0
        pam_flag = 0
        SSHD_flag = 0
        iptables_flag = 0
        disk_space_flag = 0

        # Whether the current host's report contained each section; used
        # at "# Logwatch End #" to insert the "No <service>" placeholder.
        samba_is_present = False
        httpd_is_present = False
        clam_is_present = False
        pam_is_present = False
        SSHD_is_present = False
        iptables_is_present = False
        disk_space_is_present = False

        # Duplicate-suppression state for noisy httpd request lines.
        spacer = False
        popup = False
        search_inventory = False
        compare_rate = False
        choose_model = False

        filename = self.select_file.get_filename()
        # BUGFIX: the original used open(...).readlines() and never closed
        # the file; a with-block closes it deterministically.
        with open(filename) as logwatch_file:
            for line in logwatch_file:
                if "Logwatch for" in line:
                    server_name = line.split()[2]
                    # BUGFIX: the original tested "if line in server", which
                    # compared the whole line against parsed names and never
                    # matched, so duplicate hosts were always appended.
                    if server_name not in server:
                        server.append(server_name)
                # End of one host's report: fill in placeholders for any
                # missing sections, then reset per-host state.
                elif "# Logwatch End #" in line:
                    if not samba_is_present:
                        samba_msg.append((server_name, "\___ No Samba", ""))
                    if not httpd_is_present:
                        httpd_msg.append((server_name, "\___ No httpd", ""))
                    if not clam_is_present:
                        clam_msg.append((server_name, "\___ No clam", ""))
                    if not pam_is_present:
                        pam_msg.append((server_name, "\___ No pam", ""))
                    if not SSHD_is_present:
                        SSHD_msg.append((server_name, "\___ No SSHD", ""))
                    if not iptables_is_present:
                        iptables_msg.append((server_name, "\___ No iptables", ""))
                    # BUGFIX: the original never inserted a Disk Space
                    # placeholder, so disk_space_msg indexes drifted out of
                    # alignment for hosts without that section.
                    if not disk_space_is_present:
                        disk_space_msg.append((server_name, "\___ No Disk Space", ""))
                    # Assume every service is missing on the next host.
                    samba_is_present = False
                    httpd_is_present = False
                    clam_is_present = False
                    pam_is_present = False
                    SSHD_is_present = False
                    iptables_is_present = False
                    disk_space_is_present = False
                    # BUGFIX: reset the httpd dedupe state per host; the
                    # original suppressed "duplicates" across all hosts.
                    spacer = False
                    popup = False
                    search_inventory = False
                    compare_rate = False
                    choose_model = False
                elif "logwatch@" in line:
                    # Header line, deliberately ignored.  NOTE(review): this
                    # elif does not stop an active section below from also
                    # seeing the line, matching the original behaviour.
                    pass

                # --- samba section ----------------------------------------
                if "-- samba" in line and not samba_flag:
                    samba_line = ''
                    samba_is_present = True
                    samba_flag = 1
                    continue
                if samba_flag and "-- samba" not in line:
                    samba_line += line.rstrip()
                if "-- samba" in line and samba_flag:
                    samba_flag = 0
                    # BUGFIX: label was misspelled "Samaba".
                    samba_msg.append((server_name, "Samba", samba_line))
                    continue

                # --- httpd section ----------------------------------------
                if "-- httpd" in line and not httpd_flag:
                    httpd_line = ""
                    httpd_is_present = True
                    httpd_flag = 1
                    continue
                if httpd_flag and "-- httpd" not in line:
                    # Collapse noisy repeated request lines to a single
                    # occurrence so the report stays readable.
                    if "spacer.gif" in line and not spacer:
                        spacer = True
                        httpd_line += line.rstrip() + " "
                    elif "spacer.gif" in line and spacer:
                        pass
                    elif "popup_close" in line and not popup:
                        popup = True
                        httpd_line += line.rstrip() + " "
                    elif "popup_close" in line and popup:
                        pass
                    elif "getsearchnewinventory_as" in line and not search_inventory:
                        search_inventory = True
                        httpd_line += line.rstrip() + " "
                    elif "getsearchnewinventory_as" in line and search_inventory:
                        pass
                    elif "compareRate" in line and not compare_rate:
                        compare_rate = True
                        httpd_line += line.rstrip() + " "
                    elif "compareRate" in line and compare_rate:
                        pass
                    elif "choose_model" in line and not choose_model:
                        choose_model = True
                        httpd_line += line.rstrip() + " "
                    elif "choose_model" in line and choose_model:
                        pass
                    else:
                        httpd_line += line.rstrip() + "\n "
                if "-- httpd" in line and httpd_flag:
                    httpd_flag = 0
                    httpd_msg.append((server_name, "httpd", httpd_line))
                    continue

                # --- clam section -----------------------------------------
                if "-- clam" in line and not clam_flag:
                    clam_line = ""
                    clam_is_present = True
                    clam_flag = 1
                    continue
                if clam_flag and "-- clam" not in line:
                    clam_line += line + " "
                if "-- clam" in line and clam_flag:
                    clam_flag = 0
                    clam_msg.append((server_name, "clam", clam_line))
                    continue

                # --- pam section ------------------------------------------
                if "-- pam" in line and not pam_flag:
                    pam_line = ""
                    pam_is_present = True
                    pam_flag = 1
                    continue
                if pam_flag and "-- pam" not in line:
                    pam_line += line
                if "-- pam" in line and pam_flag:
                    pam_flag = 0
                    pam_msg.append((server_name, "pam", pam_line))
                    continue

                # --- SSHD section -----------------------------------------
                if "-- SSHD" in line and not SSHD_flag:
                    SSHD_line = ""
                    SSHD_is_present = True
                    SSHD_flag = 1
                    continue
                if SSHD_flag and "-- SSHD" not in line:
                    if "time" in line:
                        # Reverse-resolve the client address for readability.
                        # BUGFIX: 'socket' was used here without ever being
                        # imported, so the silent except always fired and the
                        # lookup never ran; 'import socket' is now at the top
                        # of the file.
                        try:
                            hostname = line.split(":")[0]
                            line = line.replace(hostname, str(socket.gethostbyaddr(hostname)[0]))
                        except Exception:
                            # Best-effort: unresolvable address keeps raw line.
                            pass
                    SSHD_line += line + " "
                if "-- SSHD" in line and SSHD_flag:
                    SSHD_flag = 0
                    SSHD_msg.append((server_name, "ssh", SSHD_line))
                    continue

                # --- iptables section -------------------------------------
                if "-- iptables" in line and not iptables_flag:
                    iptables_line = ""
                    iptables_is_present = True
                    iptables_flag = 1
                    continue
                if iptables_flag and "-- iptables" not in line:
                    # Only "packet" lines are kept; the rest of the iptables
                    # section is noise for this report.
                    if "packet" in line:
                        hostname = line.split("-")[0].split()[1]
                        if "." in hostname:
                            try:
                                # DNS lookups are disabled by default because
                                # they slow the whole parse down:
                                # line = line.replace(hostname, str(socket.gethostbyaddr(hostname)[0]))
                                # Alerts generated inside the local domain
                                # are not interesting, so blank them out.
                                if "autodata" in line:
                                    line = ""
                            except Exception:
                                pass
                        iptables_line += line + " "
                if "-- iptables" in line and iptables_flag:
                    iptables_flag = 0
                    iptables_msg.append((server_name, "iptables", iptables_line))
                    continue

                # --- Disk Space section -----------------------------------
                if "-- Disk Space" in line and not disk_space_flag:
                    disk_space_line = ""
                    disk_space_is_present = True
                    disk_space_flag = 1
                    continue
                if disk_space_flag and "-- Disk Space" not in line:
                    disk_space_line += line + " "
                if "-- Disk Space" in line and disk_space_flag:
                    disk_space_flag = 0
                    disk_space_msg.append((server_name, "Disk Space", disk_space_line))
                    continue

        # Populate the host list.  BUGFIX: clear first so re-selecting a
        # file does not append duplicate rows.
        liststore1 = self.builder.get_object("liststore1")
        liststore1.clear()
        for host in server:
            liststore1.append([host])
        self.treeview = self.builder.get_object("treeview1")
        self.treeview.get_selection().set_mode(gtk.SELECTION_MULTIPLE)
        cell = self.builder.get_object("cell")
        col = self.builder.get_object("col1")
        col.set_attributes(cell, text=0)

        # The report choices are hard coded to the logwatch sections that
        # this parser understands.
        liststore2 = self.builder.get_object("liststore2")
        liststore2.clear()
        report_list = ["Samba", "httpd", "clam", "pam", "SSHD", "iptables", "Disk Space"]
        for report in report_list:
            liststore2.append([report])
        self.treeview2 = self.builder.get_object("treeview2")
        self.treeview2.get_selection().set_mode(gtk.SELECTION_MULTIPLE)
        cell2 = self.builder.get_object("cell2")
        col2 = self.builder.get_object("col2")
        col2.set_attributes(cell2, text=0)

    def get_user_selections(self, widget, callback_data=None):
        """Build and show the report window for the current selections.

        Reads the multi-select state of both treeviews and appends one
        row per (host, report) pair to the report window's liststore.
        """
        self.gladefile2 = "report.glade"
        self.builder2 = gtk.Builder()
        self.builder2.add_from_file(self.gladefile2)
        self.window3 = self.builder2.get_object("window3")
        liststore3 = self.builder2.get_object("liststore3")
        report_selection = self.treeview2.get_selection()
        host_selection = self.treeview.get_selection()
        # get_selected_rows() returns (model, [path, ...]).
        host_model, host_rows = host_selection.get_selected_rows()
        host_list = []
        for row in host_rows:
            tree_iter = host_model.get_iter(row)
            # Keep the row path so the host can be matched with the entry
            # at the same index in each *_msg list.
            host_list.append((host_model.get_value(tree_iter, 0), row))

        report_list = []
        report_model, report_rows = report_selection.get_selected_rows()
        for row in report_rows:
            tree_iter = report_model.get_iter(row)
            report_list.append(report_model.get_value(tree_iter, 0))

        # Map the fixed report names to their parsed message lists.
        msg_lookup = {
            "Samba": samba_msg,
            "httpd": httpd_msg,
            "clam": clam_msg,
            "pam": pam_msg,
            "SSHD": SSHD_msg,
            "iptables": iptables_msg,
            "Disk Space": disk_space_msg,
        }

        for host in host_list:
            # host[1] is a tree path tuple such as (3,); its first element
            # is the row index, which lines up with the *_msg indexes.
            host_index = host[1][0]
            for report in report_list:
                messages = msg_lookup.get(report)
                if messages is None:
                    continue
                entry = messages[host_index]
                # The GUI columns were wired up as (report, host, message).
                # BUGFIX: iptables previously appended entry[0] twice, so
                # the message column showed the host name instead of the
                # collected iptables text (entry[2]).
                liststore3.append([entry[1], entry[0], entry[2]])
        self.treeview3 = self.builder2.get_object("treeview3")
        self.treeview3.get_selection().set_mode(gtk.SELECTION_MULTIPLE)

        cell3 = self.builder2.get_object("cell3")
        col3 = self.builder2.get_object("col3")
        col3.set_resizable(True)
        col3.set_reorderable(True)
        col3.set_attributes(cell3, text=0)

        cell4 = self.builder2.get_object("cell4")
        col4 = self.builder2.get_object("col4")
        col4.set_resizable(True)
        col4.set_reorderable(True)
        col4.set_attributes(cell4, text=1)

        cell5 = self.builder2.get_object("cell5")
        col5 = self.builder2.get_object("col5")
        col5.set_resizable(True)
        col5.set_reorderable(True)
        col5.set_attributes(cell5, text=2)

        self.window3.show()

    def exit_program(self, widget, callback_data=None):
        """Quit the GTK main loop (Exit button handler)."""
        gtk.main_quit()

    def report_button_click(self, widget, callback_data=None):
        """Open the report window (Generate Report button handler).

        BUGFIX: the original passed get_user_selections(...)'s *return
        value* (None) to connect(), relying on the immediate call as a
        side effect and swallowing the resulting TypeError with a bare
        except.  Calling the method directly has the same visible effect
        without suppressing every error.
        """
        # Nothing to report until a file has been parsed.
        if not hasattr(self, "treeview"):
            return
        self.get_user_selections(widget)

    def __init__(self):
        """Load the selection window from its glade file and show it."""
        self.gladefile = "selection_screen.glade"
        self.builder = gtk.Builder()
        self.builder.add_from_file(self.gladefile)
        # connect_signals() also fetches select_file, so the duplicate
        # lookup the original did here has been dropped.
        self.connect_signals()
        self.window = self.builder.get_object("window1")
        self.window.show()

if __name__ == "__main__":
    # Build the GUI, then hand control to the GTK main loop.
    main = startUI()
    gtk.main()

selection_screen.glade

Code:
<?xml version="1.0" encoding="UTF-8"?>
<!-- selection_screen.glade: main window for the logwatch email parser.
     Loaded by startUI.__init__ via gtk.Builder.  Contains the file
     chooser ("select_file"), the host list (treeview1/liststore1), the
     report list (treeview2/liststore2) and the Generate Report / Exit
     buttons, all of which are looked up by id from the Python code. -->
<interface>
  <requires lib="gtk+" version="2.24"/>
  <!-- interface-naming-policy project-wide -->
  <object class="GtkListStore" id="liststore1">
    <columns>
      <!-- column-name col1 -->
      <column type="gchararray"/>
    </columns>
  </object>
  <object class="GtkListStore" id="liststore2">
    <columns>
      <!-- column-name col2 -->
      <column type="gchararray"/>
    </columns>
  </object>
  <object class="GtkWindow" id="window1">
    <property name="can_focus">False</property>
    <property name="window_position">center-always</property>
    <property name="default_width">500</property>
    <child>
      <object class="GtkVBox" id="vbox1">
        <property name="visible">True</property>
        <property name="can_focus">False</property>
        <child>
          <object class="GtkLabel" id="select_label">
            <property name="visible">True</property>
            <property name="can_focus">False</property>
            <property name="label" translatable="yes">Please select the email to parse</property>
          </object>
          <packing>
            <property name="expand">True</property>
            <property name="fill">True</property>
            <property name="position">0</property>
          </packing>
        </child>
        <child>
          <object class="GtkFileChooserButton" id="select_file">
            <property name="visible">True</property>
            <property name="can_focus">False</property>
          </object>
          <packing>
            <property name="expand">True</property>
            <property name="fill">True</property>
            <property name="position">1</property>
          </packing>
        </child>
        <child>
          <object class="GtkLabel" id="report_label">
            <property name="visible">True</property>
            <property name="can_focus">False</property>
            <property name="label" translatable="yes">Please select which host(s) you want the report for:</property>
          </object>
          <packing>
            <property name="expand">True</property>
            <property name="fill">True</property>
            <property name="position">2</property>
          </packing>
        </child>
        <child>
          <object class="GtkTreeView" id="treeview1">
            <property name="visible">True</property>
            <property name="can_focus">True</property>
            <property name="model">liststore1</property>
            <property name="reorderable">True</property>
            <property name="search_column">0</property>
            <property name="enable_tree_lines">True</property>
            <child>
              <object class="GtkTreeViewColumn" id="col1">
                <property name="resizable">True</property>
                <property name="title" translatable="yes">Servers:</property>
                <property name="expand">True</property>
                <property name="clickable">True</property>
                <property name="reorderable">True</property>
                <property name="sort_indicator">True</property>
                <child>
                  <object class="GtkCellRendererText" id="cell"/>
                </child>
              </object>
            </child>
          </object>
          <packing>
            <property name="expand">True</property>
            <property name="fill">True</property>
            <property name="position">3</property>
          </packing>
        </child>
        <child>
          <object class="GtkLabel" id="label1">
            <property name="visible">True</property>
            <property name="can_focus">False</property>
            <property name="label" translatable="yes">Please select the reports you want to run:</property>
          </object>
          <packing>
            <property name="expand">True</property>
            <property name="fill">True</property>
            <property name="position">4</property>
          </packing>
        </child>
        <child>
          <object class="GtkTreeView" id="treeview2">
            <property name="visible">True</property>
            <property name="can_focus">True</property>
            <property name="model">liststore2</property>
            <child>
              <object class="GtkTreeViewColumn" id="col2">
                <property name="title" translatable="yes">Reports:</property>
                <child>
                  <object class="GtkCellRendererText" id="cell2"/>
                </child>
              </object>
            </child>
          </object>
          <packing>
            <property name="expand">True</property>
            <property name="fill">True</property>
            <property name="position">5</property>
          </packing>
        </child>
        <child>
          <object class="GtkHBox" id="hbox1">
            <property name="visible">True</property>
            <property name="can_focus">False</property>
            <child>
              <object class="GtkButton" id="button_report">
                <property name="label" translatable="yes">Generate Report</property>
                <property name="visible">True</property>
                <property name="can_focus">True</property>
                <property name="receives_default">True</property>
                <property name="use_action_appearance">False</property>
              </object>
              <packing>
                <property name="expand">True</property>
                <property name="fill">True</property>
                <property name="position">0</property>
              </packing>
            </child>
            <child>
              <object class="GtkButton" id="button_exit">
                <property name="label" translatable="yes">Exit</property>
                <property name="visible">True</property>
                <property name="can_focus">True</property>
                <property name="receives_default">True</property>
                <property name="use_action_appearance">False</property>
              </object>
              <packing>
                <property name="expand">True</property>
                <property name="fill">True</property>
                <property name="position">1</property>
              </packing>
            </child>
          </object>
          <packing>
            <property name="expand">True</property>
            <property name="fill">True</property>
            <property name="position">6</property>
          </packing>
        </child>
      </object>
    </child>
  </object>
</interface>

report.glade
Code:
<?xml version="1.0" encoding="UTF-8"?>
<!-- report.glade: report output window for the logwatch email parser.
     Loaded by startUI.get_user_selections via gtk.Builder.  liststore3
     holds three string columns (report label, host name, message text);
     the Python code binds each renderer's text attribute at runtime. -->
<interface>
  <requires lib="gtk+" version="2.24"/>
  <!-- interface-naming-policy project-wide -->
  <object class="GtkListStore" id="liststore3">
    <columns>
      <!-- column-name col3 -->
      <column type="gchararray"/>
      <!-- column-name col4 -->
      <column type="gchararray"/>
      <!-- column-name col5 -->
      <column type="gchararray"/>
    </columns>
  </object>
  <object class="GtkWindow" id="window3">
    <property name="can_focus">False</property>
    <property name="window_position">center</property>
    <property name="default_width">600</property>
    <property name="default_height">200</property>
    <child>
      <object class="GtkScrolledWindow" id="scrolledwindow1">
        <property name="visible">True</property>
        <property name="can_focus">True</property>
        <property name="hscrollbar_policy">automatic</property>
        <property name="vscrollbar_policy">automatic</property>
        <child>
          <object class="GtkTreeView" id="treeview3">
            <property name="visible">True</property>
            <property name="can_focus">True</property>
            <property name="model">liststore3</property>
            <child>
              <object class="GtkTreeViewColumn" id="col4">
                <property name="resizable">True</property>
                <property name="sizing">autosize</property>
                <property name="title" translatable="yes">host_name</property>
                <property name="expand">True</property>
                <property name="clickable">True</property>
                <property name="reorderable">True</property>
                <property name="sort_indicator">True</property>
                <property name="sort_column_id">1</property>
                <child>
                  <object class="GtkCellRendererText" id="cell4"/>
                  <!-- NOTE(review): a fixed cell height of 8 pixels looks
                       accidental; verify it doesn't clip the text. -->
                  <attributes>
                    <attribute name="height">8</attribute>
                    <attribute name="alignment">0</attribute>
                  </attributes>
                </child>
              </object>
            </child>
            <child>
              <object class="GtkTreeViewColumn" id="col3">
                <property name="resizable">True</property>
                <property name="sizing">autosize</property>
                <property name="title" translatable="yes">Reports</property>
                <property name="expand">True</property>
                <property name="clickable">True</property>
                <property name="reorderable">True</property>
                <property name="sort_indicator">True</property>
                <property name="sort_column_id">0</property>
                <child>
                  <object class="GtkCellRendererText" id="cell3"/>
                  <!-- NOTE(review): binding 'strikethrough' to string column 1
                       looks like a leftover/mistake - confirm intended. -->
                  <attributes>
                    <attribute name="yalign">1</attribute>
                    <attribute name="strikethrough">1</attribute>
                  </attributes>
                </child>
              </object>
            </child>
            <child>
              <object class="GtkTreeViewColumn" id="col5">
                <property name="resizable">True</property>
                <property name="sizing">autosize</property>
                <property name="title" translatable="yes">Message</property>
                <property name="expand">True</property>
                <property name="clickable">True</property>
                <property name="reorderable">True</property>
                <property name="sort_indicator">True</property>
                <property name="sort_column_id">0</property>
                <child>
                  <object class="GtkCellRendererText" id="cell5"/>
                  <!-- NOTE(review): wrap-width of 4 pixels wraps nearly every
                       character; probably meant a much larger value. -->
                  <attributes>
                    <attribute name="wrap-width">4</attribute>
                  </attributes>
                </child>
              </object>
            </child>
          </object>
        </child>
      </object>
    </child>
  </object>
</interface>

Like I said, it's ugly, and I may go back and clean it up some time in the future.
 
Last edited:
This isn't really a script, but I created a nanorc for CFEngine because I couldn't find one online.

It is far from perfect, but it's better than no highlighting at all.

Code:
## Nano syntax highlighting for CFEngine policy files (*.cf).
## Rules are applied in the order they appear, so later rules paint
## over earlier ones - keep strings/comments near the end.
syntax "cfengine" "\.cf"
## NOTE(review): the extension regex "\.cf" is unanchored, so it also
## matches names merely containing ".cf" - confirm that is intended.

# Promise-type declarations (lines ending in ">") in red.
color red "[^*]*[>]"

color yellow start="\<^(bundle)" end="(agent)\>|(common)\>|(edit_line)\>" start="\<^(body)" end="(control)\>|(acl)\>|(action)\>|(changes)\>|(classes)\>|(contain)\>|(copy_from)\>|(delete)\>|(edit_defaults)\>|(edit_field)\>|(file_select)\>|(location)\>|(match_value)\>|(mount)\>|(package_method)\>|(perms)\>|(process_count)\>|(process_select)\>|(rename)\>|(replace_with)\>|(service_method)\>|(volume)\>"
## The yellow rule above highlights bundle/body headers through their type.
color green "[^*]*[:]"
color cyan "[^>]*[;]"
# Change the background of terminating semicolons.
color ,green ";"


## Double-quoted string highlighting (the original comment said
## "Comment highlighting", but this rule matches quoted strings).
color brightblue "["][^"]*[^\\]"

# Highlight variable references such as $(var).
color ,cyan start="[\$\(]" end="(\))"
color ,white start="(\(\")" end="(\"\))"
 
Last edited:
I wrote another script this morning to take care of a minor annoyance that I deal with regularly: updating serial numbers in bind zone files.

Update: see my post down below for an updated version. The original is now in the spoiler tags below.
Code:
#!/bin/bash
#
# Increment the serial number in every bind zone file under /etc/bind.
# Serials follow the YYYYMMDDnn convention: a serial already stamped with
# today's date gets its trailing counter bumped; anything else is restamped
# to today's date with a counter of 01.

cd /etc/bind

# collect the zone files to process
databases=$(ls db.*)

# terminal colour escape sequences
colgreen=$(tput setaf 2)
colred=$(tput setaf 1)
colclear=$(tput sgr0)

# walk each zone file and update its serial
for zone in ${databases}
do
	# first field of the line(s) containing the word "serial"
	serial=$(awk '/serial/ { print $1 }' ${zone})
	today=$(date +"%Y%m%d")

	if [[ -z ${serial} ]]
	then
		echo -e "[${colgreen}OK${colclear}]  serial for ${zone} does not exist, likely a default zone"
	else
		if [[ ${serial} == ${today}* ]]
		then
			# serial already carries today's date; bump the trailing counter
			newserial=$(expr ${serial} + 1)
		else
			# stale date: restamp with today's date, counter starts at 01
			newserial="${today}01"
		fi

		sed -i "s/${serial}/${newserial}/g" ${zone}

		# re-read the serial to confirm the substitution took effect
		checkserial=$(awk '/serial/ { print $1 }' ${zone})

		if [[ ${checkserial} == ${newserial} ]]
		then
			echo -e "[${colgreen}OK${colclear}]  updated serial for ${zone} to ${newserial}"
		else
			echo -e "[${colred}"'!!'"${colclear}]  serial update failed"
		fi
	fi
done

echo -e "\ndone."

This script basically reads all of the zone files for their existing serial numbers, compares each to today's date in the format that is typically used in zone files, and will either increment the value if it matches today's date or write a new serial number starting at '01'.
 
Last edited:
It is just more of a catch-all. I realize there isn't really a point in updating the serials if there hasn't been any modification. I suppose I could have just added a check to see if the file was modified today, but if I made multiple updates, or updates to different zone files at different times, it would still update those. Any thoughts on how to programmatically update the serial? I realize that I could just modify the serial manually, but who wants to do anything manually? :p
 
I updated my script above to accept a command-line argument so that you can specify a specific zone file to have its serial incremented. I also changed the phrases for [OK] and [!!] to be variables, which makes the output of the script easier to read:

*edit: I've updated the script again. See the next post. Previous code in spoiler tags.
Code:
#!/bin/bash
#
# Increment the serial number in bind zone files (YYYYMMDDnn convention).
# With no argument every db.* file under /etc/bind is processed; with a
# filename argument only that zone file is updated.

# grab the first command line argument
db=${1}

cd /etc/bind

# set color variables
colgreen=$(tput setaf 2)
colred=$(tput setaf 1)
colclear=$(tput sgr0)
OK="[${colgreen}OK${colclear}]"
error="[${colred}"'!!'"${colclear}]"

# quoting ${db} keeps the tests well-formed when the argument is empty
# or contains whitespace (previously unquoted)
if [ -z "${db}" ]
then
        # get a list of the bind zone files
        databases=$(ls db.*)
else
        # check to see if the specified zone file exists
        if [ -f "${db}" ]
        then
                # set the databases list to the specific file
                databases=${db}
        else
                # notify user that the listed file doesn't exist and exit
                echo -e "${error}  ${db} is not a valid zone file"
                exit 1
        fi
fi

# execute for loop to increment the serial for each zone file
for i in ${databases}
do
        # first field of the first line containing "serial"; -m1 guards
        # against a second match producing a multi-line value that would
        # corrupt the sed substitution below
        serial=$(grep -m1 serial "${i}" | awk '{ print $1 }')
        today=$(date +"%Y%m%d")

        if [[ -z ${serial} ]]
        then
                echo -e "${OK}  serial for ${i} does not exist, likely a default zone"
        else
                if [[ ${serial} == ${today}* ]]
                then
                        # serial already has today's date; just increment
                        newserial=$((serial + 1))
                else
                        # serial needs to be updated to today's date and start at 01
                        newserial="${today}01"
                fi

                sed -i "s/${serial}/${newserial}/g" "${i}"

                # re-read the serial to confirm the substitution took effect
                checkserial=$(grep -m1 serial "${i}" | awk '{ print $1 }')

                if [[ ${checkserial} == "${newserial}" ]]
                then
                        echo -e "${OK}  updated serial for ${i} to ${newserial}"
                else
                        echo -e "${error}  serial update failed"
                fi
        fi
done

echo -e "\ndone."

Example:
Code:
root@neptune:~# ~/scripts/bind_increment_serials db.overclockix.com
[OK]  updated serial for db.overclockix.com to 2013121206

done.
 
Last edited:
I updated the bind_increment_serials script to maintain a set of files that store the previous md5sum of each zone file. On the first run, it will create the directory and md5 files while updating all of the serials in the zone files by default. On the next run, it will use the .md5 files to check whether there have been any changes to the zone file. There are also a couple of configuration settings that will add additional functionality. This includes showing the default zone files which do not have serials, as well as reloading the zone files.

You can still specify a single zone file to update if you want but it will not increment the zone file's serial if there have not been any changes.

Code:
#!/bin/bash
#
# Increment serials in bind zone files (YYYYMMDDnn convention), but only
# when a zone file has actually changed since the last run.  An md5sum of
# each processed zone file is stored under ${bis_dir} to detect changes.
# Optionally reports default (serial-less) zones and reloads bind afterwards.

# grab the first command line argument
db=${1}

# set the directory used to store md5s
bis_dir="/var/run/bind_increment_serials"

# show results of default zones; true or false (1 or 0)
show_dz="0"

# reload zone files after updates; true or false (1 or 0)
reload_zones="1"

# -p tolerates the directory appearing between the test and the mkdir
if [ ! -d "${bis_dir}" ]
then
        mkdir -p "${bis_dir}"
fi

cd /etc/bind

# set color variables
colgreen=$(tput setaf 2)
colcyan=$(tput bold ; tput setaf 6)
colred=$(tput setaf 1)
colclear=$(tput sgr0)
OK="[${colgreen}OK${colclear}]"
NA="[${colcyan}NA${colclear}]"
error="[${colred}"'!!'"${colclear}]"

# quoting ${db} keeps the tests well-formed when the argument is empty
# or contains whitespace (previously unquoted)
if [ -z "${db}" ]
then
        # get a list of the bind zone files
        databases=$(ls db.*)
else
        # check to see if the specified zone file exists
        if [ -f "${db}" ]
        then
                # set the databases list to the specific file
                databases=${db}
        else
                # notify user that the listed file doesn't exist and exit
                echo -e "${error}  ${db} is not a valid zone file"
                exit 1
        fi
fi

# execute for loop to increment the serial for each zone file
for i in ${databases}
do
        # first field of the first line containing "serial"; -m1 guards
        # against a second match corrupting the sed substitution below
        serial=$(grep -m1 serial "${i}" | awk '{ print $1 }')
        today=$(date +"%Y%m%d")

        if [[ -z ${serial} ]]
        then
                if [ "${show_dz}" == 1 ]
                then
                        echo -e "${NA}  serial for ${i} does not exist, likely a default zone"
                fi
        else
                # compare the stored md5 (if any) against the current one to
                # decide whether the zone file changed since the last run
                if [ -f "${bis_dir}/${i}.md5" ]
                then
                        previous_md5=$(awk '{ print $1 }' "${bis_dir}/${i}.md5")
                        current_md5=$(/usr/bin/md5sum "${i}" | awk '{ print $1 }')

                        if [ "${previous_md5}" == "${current_md5}" ]
                        then
                                update=0
                        else
                                update=1
                        fi
                else
                        # no stored md5 yet: treat the file as changed
                        update=1
                fi

                if [ "${update}" == 1 ]
                then
                        # update serial
                        if [[ ${serial} == ${today}* ]]
                        then
                                # serial already has today's date; just increment
                                newserial=$((serial + 1))
                        else
                                # serial needs to be updated to today's date and start at 01
                                newserial="${today}01"
                        fi

                        sed -i "s/${serial}/${newserial}/g" "${i}"

                        # re-read the serial to confirm the substitution took effect
                        checkserial=$(grep -m1 serial "${i}" | awk '{ print $1 }')

                        if [[ ${checkserial} == "${newserial}" ]]
                        then
                                echo -e "${OK}  updated serial for ${i} to ${newserial}"
                                # store md5sum output directly (the old
                                # echo-of-substitution collapsed its spacing)
                                /usr/bin/md5sum "${i}" > "${bis_dir}/${i}.md5"
                        else
                                echo -e "${error}  serial update failed"
                        fi
                else
                        # do not update serial
                        echo -e "${OK}  no change detected in ${i}; skipping"
                fi
        fi
done

if [ "${reload_zones}" == 1 ]
then
        echo -e "\nReloading zone files..."
        /usr/sbin/rndc reload
fi

echo -e "\ndone."
 
The following is a python deployment script for dealing with tomcat 6 and 7.

Code:
#!/usr/bin/python
# Owner: Steve Ovens
# Date Created: Oct 24, 2013
# Updated: Oct 9, 2014
# Added multiple configuration inputs
# Added better error handling so that it suppresses the python stack trace, or compliments it with
# a plain english explaination
# Primary Function: Deploys Warfiles to various environments
# This script is designed to work with tomcat 6 or 7. Most failures are caused by a problem
# with tomcat-users.xml.
# It is the intent of this script to abstract all variables into a config file.
# No one should have to alter this script ever, save for bug fixes or updates.
# All client updates should be done through the generation of new config files
# This script makes use of classes and OOP style programming.


import sys
import os
import subprocess
import shutil
import datetime
import time
import requests
import json
import pytz

try:
    import paramiko
except:
    print("""This script requires the module 'paramiko' to be installed. Use
'sudo pip install paramiko' to install, and then try again""")
    sys.exit()
try:
    import magic
except:
    print("""This script requires the module 'python-magic' to be installed. Use
'sudo pip install python-magic' to install, and then try again""")
    sys.exit()

todays_date = str(datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d_%H:%M'))

# This is the initial error checking. It checks to see that the input files are text files
# and that the text files have the words 'deploy_warfiles.py' in the first line

try:
    # This section will allow for a variable amount of config files to be passed in
    # (sys.argv[0] is the script itself, so only the real arguments are read)
    PARAMETER_FILE_LIST = []
    for candidate_config in sys.argv[1:]:
        # This section is checking for the words deploy_warfiles.py. This is a header that will have
        # To be present in the warfile config file
        if "ASCII" in (magic.from_file(candidate_config)):
            # 'with' closes the handle (the old code leaked it)
            with open(candidate_config, 'r') as is_this_my_config:
                first_line = is_this_my_config.readline()
            if "deploy_warfiles.py" in first_line:
                print("header has valid key word, assuming this is a valid config")
            else:
                print("This is a text file but does not appear to be a valid warfile deployment config")
                sys.exit()
        # If the file isnt a text file, abort
        else:
            print("This does not appear to be a text file/valid config file...ABORT")
            sys.exit()
        PARAMETER_FILE_LIST.append(candidate_config)
    # No arguments at all previously slipped through silently and the script
    # carried on with an empty config list; force the usage message instead
    if not PARAMETER_FILE_LIST:
        raise IndexError("at least one config file is required")
    # I am dynamically initializing blank arrays based on the number of the config files passed in
    REMOTE_WARFILES = [[] for x in range(0, len(PARAMETER_FILE_LIST))]
    LOCAL_WARFILES = [[] for x in range(0, len(PARAMETER_FILE_LIST))]
except SystemExit:
    # The specific error paths above already explained themselves; the old
    # bare except swallowed SystemExit and appended the generic usage text
    raise
except Exception:
    print("""
    USAGE: This script expects at least one config file passed in as an argument

    I.E: ./deploy_warfiles.py config_file
    """)
    sys.exit()

# This class will handle the parsing of the config file
class ParseDeploymentParameters:
    """Parse a deploy_warfiles config file into attributes on this object.

    Each recognised ``KEY = value`` line becomes an attribute.  WARFILE_NAME
    and SERVERS may appear multiple times and accumulate into the private
    lists _WARFILE_LIST and _SERVER_LIST; every other key is a scalar where
    the last occurrence in the file wins.
    """

    # (line prefix, attribute name) pairs for the scalar settings.  Prefix
    # matching preserves the behaviour of the old startswith() elif chain.
    _SCALAR_KEYS = (
        ("PATH_TO_WARFILE", "WARFILE_PATH"),
        ("OLD_WARFILE_PATH", "OLD_WARFILE_PATH"),
        ("TOMCAT_PORT", "TOMCAT_PORT"),
        ("RESTART_TOMCAT", "RESTART_TOMCAT"),
        ("TOMCAT_VERSION", "TOMCAT_VERSION"),
        ("SSH_USER", "SSH_USER"),
        ("MOVE_FILE", "MOVE_FILE"),
        ("TOMCAT_RESTART_SCRIPT", "TOMCAT_RESTART_SCRIPT"),
        ("TOMCATUSER", "TOMCATUSER"),
        ("TOMCATPASS", "TOMCATPASS"),
        ("NAG_START", "NAG_START"),
        ("NAG_STOP", "NAG_STOP"),
        ("NAGIOS_SERVER", "NAGIOS_SERVER"),
        ("TOMCAT_DIRECTORY", "TOMCAT_DIRECTORY"),
        ("CPCODE_FILE", "CPCODE_FILE"),
        ("AKAMAI_CRED_FILE", "AKAMAI_CRED_FILE"),
    )

    def setDeploymentParameters(self, config_file):
        """Read *config_file* and (re)populate this object's attributes.

        The lists are reset on every call so the same object can be reused
        across multiple config files.
        """
        self._WARFILE_LIST = []
        self._SERVER_LIST = []
        self.INCOMING_CONFIG_FILE = config_file
        for line in open(self.INCOMING_CONFIG_FILE).readlines():
            # Skip blanks, comments, and malformed lines without '='.
            # (The old code crashed -- or silently reused a stale value --
            # when a non-comment line had no '=')
            if not line.strip() or line.startswith("#") or "=" not in line:
                continue
            # Split on the FIRST '=' only, so values may themselves
            # contain '=' (the old split("=")[1] truncated them)
            value = line.split("=", 1)[1].strip()
            if line.startswith("WARFILE_NAME"):
                self._WARFILE_LIST.append(value)
            elif line.startswith("SERVERS"):
                self._SERVER_LIST.append(value)
            else:
                for prefix, attribute in self._SCALAR_KEYS:
                    if line.startswith(prefix):
                        setattr(self, attribute, value)
                        break


class sshConnections:
    # This class allows for easier multiple connections. The problem is because /etc/init.d/tomcat restart
    # Sometimes does not wait long enough between stop and start functions. As a result, tomcat may stay down
    # To remedy this, this class will open multiple connections inserting a 20 second pause between connections
    # Hopefully this will allow most instances of tomcat to shutdown gracefully before restarting

    def open_ssh(self, server, user_name):
        """Open an SSH session to *server* as *user_name* and allocate a pty.

        On success self.ssh, self.transport, self.psuedo_tty and
        self.read_tty are ready for the caller to use.
        """
        self.ssh = paramiko.SSHClient()
        self.ssh.load_system_host_keys()
        self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        self.ssh.connect(server, username = user_name, timeout=240)
        self.transport = self.ssh.get_transport()
        self.psuedo_tty = self.transport.open_session()
        self.psuedo_tty.get_pty()
        self.read_tty = self.psuedo_tty.makefile()

    def close_ssh(self):
        """Tear down the pty and connection; the short sleep gives the
        remote side time to settle before the next connection is opened."""
        self.read_tty.close()
        self.psuedo_tty.close()
        self.ssh.close()
        time.sleep(2)

    def try_ssh(self, server_name, parameters, connection_object=None):
        """Call open_ssh with retries; return None when every attempt fails.

        parameters -- the SSH user name, forwarded to open_ssh.
        connection_object -- unused; accepted because existing callers pass
        the connection instance as a third positional argument (previously
        that raised TypeError at call time).
        """
        try:
            return self.open_ssh(server_name, parameters)
        except Exception:
            print("There was a problem connecting to %s" % server_name)
            # Up to five more attempts before giving up
            for connection_attempt in range(2, 7):
                try:
                    print("Making attempt %s" % connection_attempt)
                    return self.open_ssh(server_name, parameters)
                except Exception:
                    pass
            print("Connection to remote server failed after multiple attempts")


class StartDeployment:
    """Builds tomcat manager curl commands and drives the deploy cycle.

    begin_deployment() reads its settings from the module-global
    warfile_parameters object and records md5 hashes of the local warfiles
    into the module-global LOCAL_WARFILES.

    NOTE(review): begin_deployment() calls self.verify_warfiles() and
    self.check_curl_success(), which are defined on curlWarfile, while the
    main loop instantiates curlWarfile and calls begin_deployment() on it.
    These two classes look like they were meant to be one (or related by
    inheritance) -- confirm against the original repository.
    """

    def deployment_commands(self, tomcat_version, tomcat_user, tomcat_password, tomcat_port,
     warfile_path, server_hostname, warfile_name, deployment_path, UNDEPLOY_COMMAND="", DEPLOY_COMMAND=""):
        """Return an (undeploy, deploy) pair of curl command strings.

        Tomcat 7 uses the /manager/text endpoints; anything else falls back
        to the tomcat 6 /manager/html endpoints.  The trailing two
        parameters are unused outputs kept (with defaults) for backward
        compatibility with callers that passed them explicitly.
        """
        # This section toggles the commands required to deploy to tomcat 6 or 7
        # (fixed: the old code consulted the global warfile_parameters.TOMCAT_VERSION
        # and silently ignored its own tomcat_version argument)
        if "7" in tomcat_version:
            DEPLOY_COMMAND = '/usr/bin/curl -u%s:%s --anyauth --upload-file %s --url \
            "http://%s:%s/manager/text/deploy?%s" -w "Deployed %s"' % (tomcat_user, tomcat_password,
             warfile_path, server_hostname, tomcat_port, deployment_path, warfile_name)
            UNDEPLOY_COMMAND = '/usr/bin/curl -u%s:%s --url "http://%s:%s/manager/text/undeploy?%s" \
             -w "Deleted %s "' % (tomcat_user, tomcat_password, server_hostname,
             tomcat_port, deployment_path, warfile_name)
        else:
            DEPLOY_COMMAND = '/usr/bin/curl -u%s:%s --anyauth --form deployWar=@%s \
            --url http://%s:%s/manager/html/upload -w "Deployed %s "' % (tomcat_user, tomcat_password,
            warfile_path, server_hostname, tomcat_port, warfile_name)
            UNDEPLOY_COMMAND = '/usr/bin/curl -u%s:%s --url http://%s:%s/manager/html/undeploy -d %s \
            -w "Deleted %s "' % (tomcat_user, tomcat_password, server_hostname, tomcat_port,
            deployment_path, warfile_name)
        return (UNDEPLOY_COMMAND, DEPLOY_COMMAND)

    def begin_deployment(self, restart_script, config_counter):
        """Undeploy/redeploy every configured warfile on every configured server.

        restart_script -- the tomcat restart command taken from the config.
        config_counter -- index of the config file being processed; used to
        slot this run's hashes into the global LOCAL_WARFILES list.
        """
        # These lists are to show all of the hashes
        self.predeployed_warfile_hashes = []

        # TOMCAT_PORT specifies the naming convention of tomcat. Some servers it may be 8585, 9090, 8080 etc.
        # (narrowed from a bare except: only a missing attribute means "use the default")
        try:
            TOMCAT_PORT = warfile_parameters.TOMCAT_PORT
        except AttributeError:
            TOMCAT_PORT = 8080

        # Some environments have special restart scripts, others just use the tomcat init scripts
        # This is to determine which will be used
        if "init" in warfile_parameters.TOMCAT_RESTART_SCRIPT:
            self.STOP_COMMAND = "%s stop" % (restart_script)
            self.START_COMMAND = "%s start" % (restart_script)
        else:
            self.RESTART_COMMAND = restart_script
        for WARFILE in warfile_parameters._WARFILE_LIST:
            WARFILE_PATH = warfile_parameters.WARFILE_PATH + os.sep + WARFILE
            # Set the deploy path for tomcat curl deploys
            DEPLOY_PATH = "path=/%s" % WARFILE.replace("#", "/").replace(".war", "")
            # Only continue processing if the warfile listed in the config file exists
            if os.path.exists(WARFILE_PATH):
                # Get the predeployment warfile hash
                hash_file = os.popen("md5sum %s" % WARFILE_PATH).read()
                self.predeployed_warfile_hashes.append(hash_file)
                for EACH_SERVER in warfile_parameters._SERVER_LIST:
                    try:
                        if hasattr(warfile_parameters, "NAG_STOP"):
                            # (fixed log typo: "Turning of" -> "Turning off")
                            print(("Turning off Nagios on host %s" % EACH_SERVER))
                            os.popen("%s %s %s" % (warfile_parameters.NAG_STOP, EACH_SERVER,
                            warfile_parameters.NAGIOS_SERVER)).read()
                        UNDEPLOY_COMMAND, DEPLOY_COMMAND = self.deployment_commands(warfile_parameters.TOMCAT_VERSION,
                        warfile_parameters.TOMCATUSER, warfile_parameters.TOMCATPASS, warfile_parameters.TOMCAT_PORT,
                        WARFILE_PATH, EACH_SERVER, WARFILE, DEPLOY_PATH)
                        print("")
                        print("=======================================")
                        print(("Beginning Undeploy of old version of %s to %s on port %s" %
                        (WARFILE, EACH_SERVER, TOMCAT_PORT)))
                        print("=======================================")
                        self.check_curl_success(UNDEPLOY_COMMAND, EACH_SERVER)
                        print("")
                        print("=======================================")
                        print(("Beginning Deploy of %s to %s on port %s" % (WARFILE, EACH_SERVER,
                        TOMCAT_PORT)))
                        print("=======================================")
                        print("")
                        # Run the deploy command through the check
                        did_curl_fail = self.check_curl_success(DEPLOY_COMMAND, EACH_SERVER)
                        # If the deploy fails, assume its a network error, and retry up to 3 times
                        # It will wait 10 seconds before retrying the curl
                        if did_curl_fail == "yes":
                            for retry_count in range(3):
                                print("")
                                print(("Curl Deployment failed, retrying in 10 seconds.\
                                This is attempt number %s" % (retry_count + 1)))
                                print("")
                                time.sleep(10)
                                did_curl_fail = self.check_curl_success(DEPLOY_COMMAND, EACH_SERVER)
                                if did_curl_fail != "yes":
                                    # A retry succeeded; stop retrying.  (The old code
                                    # ignored the retry result and always reported failure.)
                                    break
                            if did_curl_fail == "yes":
                                print("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!")
                                print(("I was unable to reach %s, the deployment failed" % EACH_SERVER))
                    except Exception as e:
                        print(e)
                try:
                    if "y" in warfile_parameters.MOVE_FILE.lower():
                        shutil.move('%s' % (WARFILE_PATH), '%s/%s.%s' %
                        (warfile_parameters.OLD_WARFILE_PATH, WARFILE, todays_date))
                except Exception as e:
                    # (modernised from the Python-2-only "except Exception, e" form)
                    print("")
                    if "Permission denied:" in str(e):
                        print("""There is a permission problem with the directory.
Unable to move warfile to %s""" % warfile_parameters.OLD_WARFILE_PATH)
                        sys.exit()
        self.verify_warfiles(config_counter)
        # This is another work-around to change the output to the terminal
        LOCAL_WARFILES[config_counter] = self.predeployed_warfile_hashes

class curlWarfile:
    """Post-deployment helpers: restart tomcat over SSH, collect md5 hashes
    of the deployed warfiles, and inspect curl transfer results.

    Relies on the module globals warfile_parameters, deployment and
    REMOTE_WARFILES being populated by the main loop.

    NOTE(review): the main loop calls begin_deployment() on an instance of
    this class, but that method is defined on StartDeployment -- these two
    classes look like they were meant to be one (or related by inheritance);
    confirm against the original repository.
    """

    def verify_warfiles(self, config_counter):
        # Collects "server: hash" entries for the warfiles found remotely.
        deployed_war_hashes = []
        # This section needs work. We dont have a unified restart script yet to describe all the various
        # environments so this will be a work in progress. Maybe I should move this out to the config file
        # In addition to restarting tomcat, it also grabs an md5 hash of the warfiles on the server
        if "y" in warfile_parameters.RESTART_TOMCAT.lower():
            for EACH_SERVER in warfile_parameters._SERVER_LIST:
                print("")
                print("=======================================")
                print(("Restarting tomcat on %s with this command: %s" % (EACH_SERVER,
                warfile_parameters.TOMCAT_RESTART_SCRIPT)))
                SSH = sshConnections()
                # NOTE(review): try_ssh() is declared with two parameters but is
                # handed three arguments here -- verify against sshConnections.
                SSH.try_ssh(EACH_SERVER, warfile_parameters.SSH_USER, SSH)
                counter = 0
                # Loop through all of the files in the remote webapps directory
                while counter < len(deployment.predeployed_warfile_hashes):
                    stdin, stdout, stderr = SSH.ssh.exec_command("md5sum %s/webapps/%s" %
                    (warfile_parameters.TOMCAT_DIRECTORY,
                    deployment.predeployed_warfile_hashes[counter].split("/")[-1]))
                    md5_output = stdout.readlines()
                    # The md5 output is returned as a tupple, so I am converting it to a string before adding
                    # to the list
                    addme = "".join(md5_output) + "\n"
                    deployed_war_hashes.append(addme.rstrip())
                    counter += 1
                # I am inserting the the server name in front of each set of warfile hashes
                deployed_war_hashes.insert((len(deployed_war_hashes) - len(deployment.predeployed_warfile_hashes)),
                     (EACH_SERVER + ": "))
                # NOHUP was added to this section because sometimes the ssh session closes before
                # the restart command has finished executing causing the services to stay down
                if hasattr(deployment, "RESTART_COMMAND"):
                    SSH.psuedo_tty.exec_command("sudo nohup %s >/dev/null 2>&1" % deployment.RESTART_COMMAND)
                    SSH.psuedo_tty.recv(1024)
                    SSH.close_ssh()
                else:
                    # Stop, wait, then start on a fresh connection so tomcat
                    # has time to shut down gracefully before the restart.
                    print("Stopping tomcat")
                    SSH.psuedo_tty.exec_command("sudo nohup %s >/dev/null 2>&1" % deployment.STOP_COMMAND)
                    SSH.psuedo_tty.recv(1024)
                    SSH.close_ssh()
                    print("Sleeping for 10, then restarting tomcat")
                    time.sleep(10)
                    SSH = sshConnections()
                    SSH.try_ssh(EACH_SERVER, warfile_parameters.SSH_USER, SSH)
                    SSH.psuedo_tty.exec_command("sudo nohup %s >/dev/null 2>&1" % deployment.START_COMMAND)
                    SSH.psuedo_tty.recv(1024)
                    SSH.close_ssh()
                if hasattr(warfile_parameters, "NAG_START"):
                    print("Re-enabling Nagios on host %s" % EACH_SERVER)
                    os.popen("%s %s %s" % (warfile_parameters.NAG_START, EACH_SERVER,
                    warfile_parameters.NAGIOS_SERVER)).read()
        print("=======================================")
        print("")
        # This was added as a hack to get the output for multiple scripts to be formatted
        REMOTE_WARFILES[config_counter] = deployed_war_hashes

    def check_curl_success(self, command, SERVER_NAME):
        # Runs *command* through a shell and inspects curl's progress output.
        # Returns "yes" when zero bytes were transferred, "no" otherwise;
        # exits the program outright on a refused/empty connection.
        # (Returns None implicitly when curl produced fewer than 3 lines.)
        counter = 0
        # Curl uses stderr to show its progress so subprocess is required to capture this output
        for response_from_curl_attempt in subprocess.Popen(command, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.PIPE,
             shell=True).stderr.read().split("\n"):
            counter += 1
            print(response_from_curl_attempt.split("\n")[0].rstrip())
            if counter == 3:
                # Because the curl command constantly updates this line until the transfer is complete
                # The output is simply appended to the list. Therefore the most reliable way to determine
                # Whether the file has transfered is to get the 8th to last column which is labeled as 'Xferd'
                if "refused" in response_from_curl_attempt:
                    print("""
The connection to the server %s was refused... is the port closed?
""" % SERVER_NAME)
                    sys.exit()
                if response_from_curl_attempt == "":
                    print("""
The connection to the server %s was refused... is the port closed?
""" % SERVER_NAME)
                    sys.exit()

                if int(response_from_curl_attempt.split("\n")[0].rstrip().split()[-8]) == 0:
                    curl_fail = "yes"
                else:
                    curl_fail = "no"
                return curl_fail


# If the environment requires an Akamai purge, this section will activate
# This section requires that the django_akamai egg be installed
class purgeAkamai:
    """Submit a CPCode purge request to the Akamai CCU v2 REST API.

    Reads the credential and CPCode files named by the module-global
    warfile_parameters, posts the purge, prints Akamai's status response
    and records the progress URI under /tmp for later polling.
    """

    def __init__(self):

        try:
            user_file = open(warfile_parameters.AKAMAI_CRED_FILE).readlines()
            cp_code_file = open(warfile_parameters.CPCODE_FILE).readlines()
        except Exception:
            print("There was a problem with the akamai clearing")
            sys.exit()

        # Gather the user name and password from the user_file
        for line in user_file:
            if "username" in line:
                username = line.split("=")[1].strip()
            if "password" in line:
                password = line.split("=")[1].strip()

        # Since most of the servers have been converted to GMT, report time in GMT
        timezone = pytz.timezone("GMT")
        purge_date = datetime.datetime.now(timezone).strftime("%Y-%m-%d-%H:%M")

        # These urls were obtained from https://api.ccu.akamai.com/ccu/v2/docs/index.html
        akamai_base_url = "https://api.ccu.akamai.com"
        akamai_clear_url = akamai_base_url + "/ccu/v2/queues/default"

        credentials = (username, password)

        # These headers are important as they declare the post type to be json which akamai requires
        akamai_headers = {'Content-type': 'application/json', 'Accept': 'text/plain'}

        ###################### This is the purge section ######################

        # The REST API requires a comma separated list of quoted CPCodes.
        # Building a list and joining it fixes the old counter logic, which
        # compared a count of non-comment lines against len() of ALL lines
        # and so emitted a trailing comma (invalid JSON) whenever the CPCode
        # file contained comment lines.
        quoted_cpcodes = []
        for individual_cpcode in cp_code_file:
            # Ignore commented and blank lines
            stripped_cpcode = individual_cpcode.strip()
            if stripped_cpcode and not stripped_cpcode.startswith("#"):
                quoted_cpcodes.append('"%s"' % stripped_cpcode)
        cp_code_list = ",".join(quoted_cpcodes)

        # Construct the dict to send to akamai, it should look something like this:
        # { "type" : "cpcode", "objects" : [ "334455" ] }
        data = '{"objects": [%s], "type": "cpcode"}' % cp_code_list

        # Send the purge request
        request_clear = requests.post(akamai_clear_url, data=data, auth=credentials, headers=akamai_headers)

        # I am turning the json object into a python dictionary so I can extract the Uri
        akamai_response_to_clear_request = json.loads(request_clear.text)

        print(("Time until purge completion: " + str(akamai_response_to_clear_request["estimatedSeconds"])))
        print(("Status: " + akamai_response_to_clear_request["detail"]))
        akamai_purge_url = akamai_base_url + akamai_response_to_clear_request["progressUri"]

        # This step is not needed because all it does is checks the status.
        # However it is a good way to verify that the request was sent and is being processed properly
        request_status = requests.get(akamai_purge_url, auth=credentials, headers=akamai_headers)

        response_to_status_request = json.loads(request_status.text)
        print("")
        print(("Submitted by: " + response_to_status_request["submittedBy"]))
        print(("Purge ID: " + response_to_status_request["purgeId"]))
        print(("Status: " + response_to_status_request["purgeStatus"]))
        print("")

        print(("The original purge request was sent on: " + purge_date))

        # Record the progress URI so a later run can poll the purge status
        # ('with' guarantees the file is closed)
        with open("/tmp/check_akamai_status", "w") as request_file:
            request_file.write("#This is the PROGRESS URI of the request sent on %s GMT\n" % purge_date)
            request_file.write(akamai_response_to_clear_request["progressUri"])


# This section will go through the array of input files
# And execute on each individually
counter = 0
for PARAM_FILES in PARAMETER_FILE_LIST:
    # Parse this config file's settings into a fresh parameter object;
    # the classes above read it via the module-global name.
    warfile_parameters = ParseDeploymentParameters()
    warfile_parameters.setDeploymentParameters(PARAM_FILES)
    # NOTE(review): begin_deployment() is defined on StartDeployment, not on
    # curlWarfile -- as written this call would raise AttributeError; confirm
    # whether curlWarfile was meant to inherit from StartDeployment.
    deployment = curlWarfile()
    deployment.begin_deployment(warfile_parameters.TOMCAT_RESTART_SCRIPT, counter)
    counter += 1
    # An Akamai purge only runs when the config supplies CPCODE_FILE.
    if hasattr(warfile_parameters, "CPCODE_FILE"):
        purgeAkamai()


# Report the hashes of the local (pre-deploy) warfiles, grouped by the
# parameter file they came from, with a blank line closing each group.
print("The predeployed (local) warfiles have the following hashes:\n")
warfile_counter = 0
input_file_counter = 0
for predeployed_hash_list in LOCAL_WARFILES:
    # Header: which input parameter file this group of hashes belongs to
    print(PARAMETER_FILE_LIST[input_file_counter])
    for individual_hash in predeployed_hash_list:
        print(individual_hash.strip())
    # Close the group only when it actually contained hashes; an empty
    # list leaves the file index untouched, exactly like the counter
    # bookkeeping it replaces
    if predeployed_hash_list:
        print("")
        input_file_counter += 1

print("")
print("These are the files which were deployed to the WEBAPPS directory: \n ")

# Walk the per-server lists of deployed warfile hashes, printing a blank
# line after each group so the output mirrors the local-hash report above.
counter = 0
for remote_warfile_list in REMOTE_WARFILES:
    for deployed_hash in remote_warfile_list:
        # print() as a function call — the bare py2 `print x` statement used
        # here was inconsistent with the rest of the script and is a syntax
        # error under Python 3
        print(deployed_hash)
        counter += 1
        if counter == (len(remote_warfile_list)):
            print("")
            counter = 0

The companion file for this looks like this

Code:
PATH_TO_WARFILE = 
OLD_WARFILE_PATH =
MOVE_FILE =  
WARFILE_NAME = 
WARFILE_NAME = 

SERVERS = 
SERVERS = 
SSH_USER =
RESTART_TOMCAT = yes

TOMCAT_RESTART_SCRIPT = /etc/init.d/tomcat
TOMCAT_PORT = 8080
TOMCAT_VERSION = 7
TOMCATUSER = 
TOMCATPASS = 
TOMCAT_DIRECTORY = /usr/local/tomcat

NAGIOS_SERVER = 
NAG_START = 
NAG_STOP =

For obvious reasons, I have removed the values from this companion file. Hopefully they are self-evident.
 
Last edited:
Thought I would throw this up here as well... It's a bit hackish, but anyone looking for a way to clear the Akamai cache using their REST API can have a go with this

Code:
#!/usr/bin/python
# This program was designed to clear all the cache automatically
# It will clear the akamai cache after a certain amount of time.
# It uses Akamai's new REST API and therefore requires the modules
# 'requests' and 'json' to work properly
# Written by stratus_ss
# Created on: July 2014

import requests
import json
import sys
import datetime
import pytz

# Read the two required input files, bailing out with a usage message if
# either argument is missing (IndexError) or cannot be opened (IOError).
try:
    # "with" guarantees both handles are closed even if a later line raises
    with open(sys.argv[1]) as user_handle:
        user_file = user_handle.readlines()
    with open(sys.argv[2]) as input_handle:
        input_file = input_handle.readlines()
    #This section is a bit of a hack. I am testing to see if the second input file
    #Has the header indicating a previous purge request
    for line in input_file:
        if "PROGRESS URI" in line:
            #The status check contains the date of the original purge request
            status_check = str(line.split("on ")[1])
        if not line.startswith("#") and 'status_check' in globals():
            check_akamai_status = line.strip()
    #I am checking the list of global variables. If I cant find the check_akamai_status
    #assume that this is a new purge request and the second argument is the cp code list
    if not 'check_akamai_status' in globals():
        cp_code_file = input_file
# Narrowed from a bare `except:` so genuine bugs (NameError, etc.) are not
# silently swallowed; IndexError also covers a malformed "on " header split.
except (IndexError, IOError):
    print("This script expects the user_file as the first argument and the cpcode file as the second argument")
    print("I.E. ./purge_akamai_restful.py user_file CB_uat_cpcodes")
    print("or ./purge_akamai_restful.py user_file /tmp/check_akamai_status")
    sys.exit()

# Pull the username and password out of the user_file. Any line that
# mentions the key is treated as key=value; the value is the text between
# the first and second "=" signs, whitespace-stripped.
for credential_line in user_file:
    for credential_key in ("username", "password"):
        if credential_key in credential_line:
            # Assign the module-level variable of the same name
            globals()[credential_key] = credential_line.split("=")[1].strip()

# Most of the servers run on GMT, so timestamps are reported in GMT too.
gmt = pytz.timezone("GMT")
todays_date = datetime.datetime.now(gmt).strftime("%Y-%m-%d-%H:%M")

# Endpoints per https://api.ccu.akamai.com/ccu/v2/docs/index.html
akamai_base_url = "https://api.ccu.akamai.com"
akamai_clear_url = "%s/ccu/v2/queues/default" % akamai_base_url

# Basic-auth pair taken from the user_file parsed above
credentials = (username, password)

# Akamai requires a json Content-type on the POST; these headers declare it
akamai_headers = {'Content-type': 'application/json', 'Accept': 'text/plain'}

###################### This is the purge section ######################
#Only do this if we have a cp_code_file

if 'cp_code_file' in globals():
    # Collect the CP codes first (skipping comment lines), then join them.
    # Joining fixes the original counter-vs-file-length comparison, which
    # added a trailing comma (invalid JSON) whenever the file ended with a
    # comment line, because the counter only counted non-comment lines.
    active_cpcodes = [individual_cpcode.strip()
                      for individual_cpcode in cp_code_file
                      if not individual_cpcode.startswith("#")]
    #The REST API requires a comma separated list of quoted CPCodes
    cp_code_list = ",".join('"%s"' % cpcode for cpcode in active_cpcodes)

    #Construct the dict to send to akamai, it should look something like this:
    #{ "type" : "cpcode", "objects" : [ "334455" ] }
    data = '{"objects": [%s], "type": "cpcode"}' % cp_code_list

    #Send the purge request
    request_clear = requests.post(akamai_clear_url, data=data, auth=credentials, headers=akamai_headers)

    #I am turning the json object into a python dictionary so I can extract the Uri
    akamai_response_to_clear_request = json.loads(request_clear.text)

    print("Time until purge completion: " + str(akamai_response_to_clear_request["estimatedSeconds"]))
    print("Status: " + akamai_response_to_clear_request["detail"])
    akamai_purge_url = akamai_base_url + akamai_response_to_clear_request["progressUri"]

else:
    #No CP code file means we were handed a saved progress URI to poll instead
    akamai_purge_url = akamai_base_url + check_akamai_status

#This step is not needed because all it does is checks the status.
#However it is a good way to verify that the request was sent and is being processed properly
request_status = requests.get(akamai_purge_url, auth=credentials, headers=akamai_headers)

response_to_status_request = json.loads(request_status.text)
print("")
print("Submitted by: " + response_to_status_request["submittedBy"])
print("Purge ID: " + response_to_status_request["purgeId"])
print("Status: " + response_to_status_request["purgeStatus"])
print("")

# When we only polled an existing request, remind the user when it was made
if 'cp_code_file' not in globals():
    print("The original purge request was sent on: " + status_check)

if 'cp_code_file' in globals():
    # Persist the progress URI so a later run can check on this purge.
    # "with" ensures the handle is closed (and the data flushed) even if a
    # write raises, replacing the unguarded open/close pair.
    with open("/tmp/check_akamai_status", "w") as request_file:
        request_file.write("#This is the PROGRESS URI of the request sent on %s GMT\n" % todays_date)
        request_file.write(akamai_response_to_clear_request["progressUri"])
 
Back