MySQL error-log monitoring using a shell script with mail alert

#!/bin/bash
SERNAME=xyz.kmi.com
SERPUBIP=192.168.72.50
SERVER=`hostname`
WEEKDAY=`date '+%w%H%M'`
DATE_VAR=`date '+%Y_%m_%d'`

Error_log_loc=/var/log
# Extract unique error lines (MySQL logs errors as "[ERROR]", so match case-insensitively)
egrep -i 'error' $Error_log_loc/mysqld.log | sort -u > $Error_log_loc/mysqld_ALERTLOG.txt
# Archive the full log, then truncate it for the next run
cat $Error_log_loc/mysqld.log >> $Error_log_loc/mysqld_archived.log

cat /dev/null > $Error_log_loc/mysqld.log

# Mail the alert only if errors were found (test the extract, not the freshly truncated log)
if [ -s "$Error_log_loc/mysqld_ALERTLOG.txt" ] ; then
cat $Error_log_loc/mysqld_ALERTLOG.txt | mail -s "URGENT - ERROR in MySQL Error Log for $SERNAME ($SERPUBIP) at `date`" ssdas@gmail.com
fi

# Roughly weekly archive rotation: datestamp and compress (scheduled via cron, e.g. 00:15)

sysdate=`date '+%-d'`    # day of month without leading zero (locale-safe)
if [[ $sysdate -eq 4 || $sysdate -eq 11 || $sysdate -eq 18 || $sysdate -eq 25 ]] ; then
mv $Error_log_loc/mysqld_archived.log $Error_log_loc/mysqld_archived_ALERTLOG_${DATE_VAR}.log
gzip $Error_log_loc/mysqld_archived_ALERTLOG_${DATE_VAR}.log
fi
exit 0
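
To drive the script, a minimal crontab sketch (the script path /u01/scripts/mysql_error_alert.sh is an assumed name; adjust to where you save it):

# Daily at 00:15; the day-of-month test inside the script handles the weekly rotation
15 0 * * * /u01/scripts/mysql_error_alert.sh >/dev/null 2>&1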

Batch script to back up database users (logins) in SQL Server

SQL Server Version:- SQL Server 2014

Create a batch file, which will be scheduled via Task Scheduler.

@echo off
REM Build a timestamp for the log file name (substring offsets depend on the regional date/time format)
set backuplogfilename=%date:~-7,2%-%date:~-10,2%-%date:~-4,4%-0%time:~1,1%%time:~3,2%%time:~6,2%
SQLCMD.EXE -S localhost -U sa -P "sa@123" -i "E:\users.sql" >> "E:\users_%backuplogfilename%.log"

Save and exit.

Now schedule the above .bat file in Task Scheduler; a command-line sketch follows.
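
Instead of the Task Scheduler GUI, schtasks can register the job from the command line. An illustrative example, assuming the batch file is saved as E:\users_backup.bat (name and schedule are assumptions):

schtasks /Create /TN "SQL Login Backup" /TR "E:\users_backup.bat" /SC WEEKLY /D SUN /ST 00:15 /RU SYSTEM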


Create a SQL file, let's say users.sql.
Content of users.sql:

USE master
GO
IF OBJECT_ID ('sp_hexadecimal') IS NOT NULL
  DROP PROCEDURE sp_hexadecimal
GO
CREATE PROCEDURE sp_hexadecimal
    @binvalue varbinary(256),
    @hexvalue varchar (514) OUTPUT
AS
DECLARE @charvalue varchar (514)
DECLARE @i int
DECLARE @length int
DECLARE @hexstring char(16)
SELECT @charvalue = '0x'
SELECT @i = 1
SELECT @length = DATALENGTH (@binvalue)
SELECT @hexstring = '0123456789ABCDEF'
WHILE (@i <= @length)
BEGIN
  DECLARE @tempint int
  DECLARE @firstint int
  DECLARE @secondint int
  SELECT @tempint = CONVERT(int, SUBSTRING(@binvalue,@i,1))
  SELECT @firstint = FLOOR(@tempint/16)
  SELECT @secondint = @tempint - (@firstint*16)
  SELECT @charvalue = @charvalue +
    SUBSTRING(@hexstring, @firstint+1, 1) +
    SUBSTRING(@hexstring, @secondint+1, 1)
  SELECT @i = @i + 1
END

SELECT @hexvalue = @charvalue
GO

IF OBJECT_ID ('sp_help_revlogin') IS NOT NULL
  DROP PROCEDURE sp_help_revlogin
GO
CREATE PROCEDURE sp_help_revlogin @login_name sysname = NULL AS
DECLARE @name sysname
DECLARE @type varchar (1)
DECLARE @hasaccess int
DECLARE @denylogin int
DECLARE @is_disabled int
DECLARE @PWD_varbinary  varbinary (256)
DECLARE @PWD_string  varchar (514)
DECLARE @SID_varbinary varbinary (85)
DECLARE @SID_string varchar (514)
DECLARE @tmpstr  varchar (1024)
DECLARE @is_policy_checked varchar (3)
DECLARE @is_expiration_checked varchar (3)

DECLARE @defaultdb sysname

IF (@login_name IS NULL)
  DECLARE login_curs CURSOR FOR
      SELECT p.sid, p.name, p.type, p.is_disabled, p.default_database_name, l.hasaccess, l.denylogin
      FROM sys.server_principals p LEFT JOIN sys.syslogins l
      ON ( l.name = p.name )
      WHERE p.type IN ( 'S', 'G', 'U' ) AND p.name <> 'sa'
ELSE
  DECLARE login_curs CURSOR FOR
      SELECT p.sid, p.name, p.type, p.is_disabled, p.default_database_name, l.hasaccess, l.denylogin
      FROM sys.server_principals p LEFT JOIN sys.syslogins l
      ON ( l.name = p.name )
      WHERE p.type IN ( 'S', 'G', 'U' ) AND p.name = @login_name

OPEN login_curs

FETCH NEXT FROM login_curs INTO @SID_varbinary, @name, @type, @is_disabled, @defaultdb, @hasaccess, @denylogin
IF (@@fetch_status = -1)
BEGIN
  PRINT 'No login(s) found.'
  CLOSE login_curs
  DEALLOCATE login_curs
  RETURN -1
END
SET @tmpstr = '/* sp_help_revlogin script '
PRINT @tmpstr
SET @tmpstr = '** Generated ' + CONVERT (varchar, GETDATE()) + ' on ' + @@SERVERNAME + ' */'
PRINT @tmpstr
PRINT ''
WHILE (@@fetch_status <> -1)
BEGIN
  IF (@@fetch_status <> -2)
  BEGIN
    PRINT ''
    SET @tmpstr = '-- Login: ' + @name
    PRINT @tmpstr
    IF (@type IN ( 'G', 'U'))
    BEGIN -- NT authenticated account/group

      SET @tmpstr = 'CREATE LOGIN ' + QUOTENAME( @name ) + ' FROM WINDOWS WITH DEFAULT_DATABASE = [' + @defaultdb + ']'
    END
    ELSE BEGIN -- SQL Server authentication
      -- obtain password hash and SID
      SET @PWD_varbinary = CAST( LOGINPROPERTY( @name, 'PasswordHash' ) AS varbinary (256) )
      EXEC sp_hexadecimal @PWD_varbinary, @PWD_string OUT
      EXEC sp_hexadecimal @SID_varbinary, @SID_string OUT

      -- obtain password policy state
      SELECT @is_policy_checked = CASE is_policy_checked WHEN 1 THEN 'ON' WHEN 0 THEN 'OFF' ELSE NULL END FROM sys.sql_logins WHERE name = @name
      SELECT @is_expiration_checked = CASE is_expiration_checked WHEN 1 THEN 'ON' WHEN 0 THEN 'OFF' ELSE NULL END FROM sys.sql_logins WHERE name = @name

      SET @tmpstr = 'CREATE LOGIN ' + QUOTENAME( @name ) + ' WITH PASSWORD = ' + @PWD_string + ' HASHED, SID = ' + @SID_string + ', DEFAULT_DATABASE = [' + @defaultdb + ']'

        IF ( @is_policy_checked IS NOT NULL )
        BEGIN
          SET @tmpstr = @tmpstr + ', CHECK_POLICY = ' + @is_policy_checked
        END
        IF ( @is_expiration_checked IS NOT NULL )
        BEGIN
          SET @tmpstr = @tmpstr + ', CHECK_EXPIRATION = ' + @is_expiration_checked
        END
    END
    IF (@denylogin = 1)
    BEGIN -- login is denied access
      SET @tmpstr = @tmpstr + '; DENY CONNECT SQL TO ' + QUOTENAME( @name )
    END
    ELSE IF (@hasaccess = 0)
    BEGIN -- login exists but does not have access
      SET @tmpstr = @tmpstr + '; REVOKE CONNECT SQL TO ' + QUOTENAME( @name )
    END
    IF (@is_disabled = 1)
    BEGIN -- login is disabled
      SET @tmpstr = @tmpstr + '; ALTER LOGIN ' + QUOTENAME( @name ) + ' DISABLE'
    END
    PRINT @tmpstr
  END

  FETCH NEXT FROM login_curs INTO @SID_varbinary, @name, @type, @is_disabled, @defaultdb, @hasaccess, @denylogin
   END
CLOSE login_curs
DEALLOCATE login_curs
RETURN 0
GO

-- Finally, invoke the procedure so the generated statements land in the sqlcmd log file
EXEC dbo.sp_help_revlogin
GO
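
The sqlcmd log then contains one re-creatable statement per login. A hypothetical sample of the generated output (login name, hash, and SID are illustrative and truncated):

-- Login: appuser
CREATE LOGIN [appuser] WITH PASSWORD = 0x0200D4A1... HASHED, SID = 0x8A3BF1..., DEFAULT_DATABASE = [master], CHECK_POLICY = OFF, CHECK_EXPIRATION = OFF

Replaying these statements on another instance recreates the logins with their original password hashes and SIDs, so database users map cleanly after a restore.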

How to install a single-node Hadoop cluster on CentOS 6

What is Hadoop?
Hadoop is an open-source framework for storing and processing Big Data in a distributed environment. It contains two core modules:
MapReduce and the Hadoop Distributed File System (HDFS).


• MapReduce: a parallel programming model for processing large amounts of structured, semi-structured, and
unstructured data on large clusters of commodity hardware.

• HDFS: the Hadoop Distributed File System is the storage layer of the Hadoop framework, used to store the datasets. It
provides a fault-tolerant file system that runs on commodity hardware.

Hostname:- server1.soumya.com
OS:- CentOS 6

Step 1: Install Java
Download the Java SE 8 (JDK 8u101) archive:

[root@server1 ~]# wget --no-cookies --no-check-certificate --header "Cookie: gpw_e24=http%3A%2F%2Fwww.oracle.com%2F; oraclelicense=accept-securebackup-cookie" "http://download.oracle.com/otn-pub/java/jdk/8u101-b13/jdk-8u101-linux-x64.tar.gz"
[root@server1 ~]# tar zxvf jdk-8u101-linux-x64.tar.gz
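
The subsequent alternatives commands reference the JDK under /u01, so presumably the extracted directory is moved there first:

[root@server1 ~]# mv jdk1.8.0_101 /u01/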

Step 2:- Install Java with Alternatives

After extracting the archive, use the alternatives command to register it. The alternatives command is available in the chkconfig
package.

[root@server1 jdk1.8.0_101]# alternatives --install /usr/bin/java java /u01/jdk1.8.0_101/bin/java 2
[root@server1 jdk1.8.0_101]# alternatives --config java

There are 4 programs which provide 'java'.

  Selection    Command
-----------------------------------------------
   1           /usr/lib/jvm/jre-1.5.0-gcj/bin/java
*+ 2           /usr/lib/jvm/jre-1.7.0-openjdk.x86_64/bin/java
   3           /usr/lib/jvm/jre-1.6.0-openjdk.x86_64/bin/java
   4           /u01/jdk1.8.0_101/bin/java

Enter to keep the current selection[+], or type selection number: 4

Now Java 8 has been installed. It is also recommended to set up the javac and jar command paths using alternatives:

[root@server1 jdk1.8.0_101]# alternatives --install /usr/bin/jar jar /u01/jdk1.8.0_101/bin/jar 4
[root@server1 jdk1.8.0_101]# alternatives --install /usr/bin/javac javac /u01/jdk1.8.0_101/bin/javac 4
[root@server1 jdk1.8.0_101]# alternatives --set jar /u01/jdk1.8.0_101/bin/jar
[root@server1 jdk1.8.0_101]# alternatives --set javac /u01/jdk1.8.0_101/bin/javac

Now check the java version:-
[root@server1 alternatives]# java -version
java version "1.8.0_101"
Java(TM) SE Runtime Environment (build 1.8.0_101-b13)
Java HotSpot(TM) 64-Bit Server VM (build 25.101-b13, mixed mode)

Step 3:-
Now configure the environment variables:

Set the JAVA_HOME variable
# export JAVA_HOME=/u01/jdk1.8.0_101
Set the JRE_HOME variable
# export JRE_HOME=/u01/jdk1.8.0_101/jre
Set the PATH variable
# export PATH=$PATH:/u01/jdk1.8.0_101/bin:/u01/jdk1.8.0_101/jre/bin

Add the following lines to the ~/.bash_profile file so the variables are set automatically at every login:

[root@server1 ~]# vi ~/.bash_profile

export JAVA_HOME=/u01/jdk1.8.0_101
export JRE_HOME=/u01/jdk1.8.0_101/jre
export PATH=$PATH:/u01/jdk1.8.0_101/bin:/u01/jdk1.8.0_101/jre/bin

:wq (save & exit)

Step 4:- Create Hadoop user

[root@server1 ~]# adduser hadoop
[root@server1 ~]# passwd hadoop

After the user account is created, generate an SSH key so the hadoop user can log in to its own account without a password (the Hadoop start scripts use SSH to launch daemons on localhost).

[root@server1 ~]# su - hadoop
[hadoop@server1 ~]$ ssh-keygen -t rsa
Generating public/private rsa key pair.
Enter file in which to save the key (/home/hadoop/.ssh/id_rsa):
Created directory '/home/hadoop/.ssh'.
Enter passphrase (empty for no passphrase):
Enter same passphrase again:
Your identification has been saved in /home/hadoop/.ssh/id_rsa.
Your public key has been saved in /home/hadoop/.ssh/id_rsa.pub.
The key fingerprint is:
c5:3e:25:c0:92:23:d0:17:fa:56:72:4c:79:72:4c:fe hadoop@server1.soumya.com
The key's randomart image is:
+--[ RSA 2048]----+
|  .o  .+o+.      |
|    o.=o+++      |
|    .o.o++= .    |
|     . + o +     |
|      o S o E    |
|     .     .     |
|                 |
|                 |
|                 |
+-----------------+
[hadoop@server1 ~]$
[hadoop@server1 ~]$ cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys
[hadoop@server1 ~]$ chmod 0600 ~/.ssh/authorized_keys

Check the connectivity:-
[hadoop@server1 ~]$ ssh localhost
The authenticity of host 'localhost (::1)' can't be established.
RSA key fingerprint is 0b:59:e4:8b:b1:e6:12:3a:38:4f:ba:74:ef:8a:ad:46.
Are you sure you want to continue connecting (yes/no)? yes
Warning: Permanently added 'localhost' (RSA) to the list of known hosts.
[hadoop@server1 ~]$ exit
logout
Connection to localhost closed.
[hadoop@server1 ~]$

Step 5:- Download Hadoop 2.6.0
[hadoop@server1 ~]$ wget http://apache.claz.org/hadoop/common/hadoop-2.6.0/hadoop-2.6.0.tar.gz
[hadoop@server1 ~]$ tar -zxvf hadoop-2.6.0.tar.gz
[hadoop@server1 ~]$ mv hadoop-2.6.0 /home/hadoop/hadoop

Step 6:- As the hadoop user, edit the .bash_profile file and add the following lines.
[hadoop@server1 ~]$ vi /home/hadoop/.bash_profile
export PATH
#Java Env Variables
export JAVA_HOME=/u01/jdk1.8.0_101
export JRE_HOME=/u01/jdk1.8.0_101/jre
export PATH=$PATH:/u01/jdk1.8.0_101/bin:/u01/jdk1.8.0_101/jre/bin


#Hadoop Env Variables
export HADOOP_HOME=/home/hadoop/hadoop
export HADOOP_INSTALL=$HADOOP_HOME
export HADOOP_MAPRED_HOME=$HADOOP_HOME
export HADOOP_COMMON_HOME=$HADOOP_HOME
export HADOOP_HDFS_HOME=$HADOOP_HOME
export YARN_HOME=$HADOOP_HOME
export HADOOP_COMMON_LIB_NATIVE_DIR=$HADOOP_HOME/lib/native
export PATH=$PATH:$HADOOP_HOME/sbin:$HADOOP_HOME/bin

:wq (save & exit)

Now apply the changes in the current running environment:
[hadoop@server1 ~]$ . /home/hadoop/.bash_profile
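
A quick sanity check that the profile took effect (output abridged; the version line should match the download above):

[hadoop@server1 ~]$ echo $HADOOP_HOME
/home/hadoop/hadoop
[hadoop@server1 ~]$ hadoop version
Hadoop 2.6.0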

Now edit the following file and set the Java path:

[hadoop@server1 ~]$ vi $HADOOP_HOME/etc/hadoop/hadoop-env.sh

export JAVA_HOME=/u01/jdk1.8.0_101

:wq

Now edit the Hadoop configuration files and add the following lines.
[hadoop@server1 ~]$ cd $HADOOP_HOME/etc/hadoop

[hadoop@server1 hadoop]$ vi core-site.xml
<configuration>
<property>
  <name>fs.default.name</name>
    <value>hdfs://localhost:9000</value>
</property>
</configuration>
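
Note: fs.default.name still works in Hadoop 2.6 but is deprecated; the current property name is fs.defaultFS, so the equivalent entry would be:

<property>
  <name>fs.defaultFS</name>
  <value>hdfs://localhost:9000</value>
</property>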

[hadoop@server1 hadoop]$ vi hdfs-site.xml
<configuration>
<property>
 <name>dfs.replication</name>
 <value>1</value>
</property>

<property>
  <name>dfs.name.dir</name>
    <value>file:///home/hadoop/hadoopdata/hdfs/namenode</value>
</property>

<property>
  <name>dfs.data.dir</name>
    <value>file:///home/hadoop/hadoopdata/hdfs/datanode</value>
</property>
</configuration>
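
Likewise, dfs.name.dir and dfs.data.dir are the older property names; in Hadoop 2.x they are dfs.namenode.name.dir and dfs.datanode.data.dir. Both forms work here, the old ones with a deprecation warning.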

:wq


[hadoop@server1 hadoop]$ cp mapred-site.xml.template mapred-site.xml
[hadoop@server1 hadoop]$ vi mapred-site.xml
<configuration>
 <property>
  <name>mapreduce.framework.name</name>
   <value>yarn</value>
 </property>
</configuration>


:wq


[hadoop@server1 hadoop]$ vi yarn-site.xml
<configuration>
 <property>
  <name>yarn.nodemanager.aux-services</name>
    <value>mapreduce_shuffle</value>
 </property>
</configuration>

:wq

Step 7:- Now format the namenode using the following command.
[hadoop@server1 ~]$ hdfs namenode -format

Sample output:

16/09/09 14:56:22 INFO namenode.NameNode: STARTUP_MSG:
/************************************************************
STARTUP_MSG: Starting NameNode
STARTUP_MSG:   host = server1.soumya.com/192.168.2.12
STARTUP_MSG:   args = [-format]
STARTUP_MSG:   version = 2.6.0
...
...
16/09/09 14:56:25 INFO common.Storage: Storage directory /home/hadoop/hadoopdata/hdfs/namenode has been successfully formatted.
16/09/09 14:56:25 INFO namenode.NNStorageRetentionManager: Going to retain 1 images with txid >= 0
16/09/09 14:56:25 INFO util.ExitUtil: Exiting with status 0
16/09/09 14:56:25 INFO namenode.NameNode: SHUTDOWN_MSG:
/************************************************************
SHUTDOWN_MSG: Shutting down NameNode at server1.soumya.com/192.168.2.12
************************************************************/


Step 8:-
Now start the Hadoop cluster.
[hadoop@server1 ~]$ cd $HADOOP_HOME/sbin/
Now run the start-dfs.sh script:

[hadoop@server1 sbin]$ start-dfs.sh
16/09/09 15:07:54 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
Starting namenodes on [localhost]
hadoop@localhost's password:
localhost: starting namenode, logging to /home/hadoop/hadoop/logs/hadoop-hadoop-namenode-server1.soumya.com.out
hadoop@localhost's password:
localhost: starting datanode, logging to /home/hadoop/hadoop/logs/hadoop-hadoop-datanode-server1.soumya.com.out
Starting secondary namenodes [0.0.0.0]
The authenticity of host '0.0.0.0 (0.0.0.0)' can't be established.
RSA key fingerprint is 0b:59:e4:8b:b1:e6:12:3a:38:4f:ba:74:ef:8a:ad:46.
Are you sure you want to continue connecting (yes/no)? yes
0.0.0.0: Warning: Permanently added '0.0.0.0' (RSA) to the list of known hosts.
hadoop@0.0.0.0's password:
0.0.0.0: starting secondarynamenode, logging to /home/hadoop/hadoop/logs/hadoop-hadoop-secondarynamenode-server1.soumya.com.out
16/09/09 15:08:24 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable

Now run the start-yarn.sh script.

[hadoop@server1 sbin]$ start-yarn.sh
starting yarn daemons
starting resourcemanager, logging to /home/hadoop/hadoop/logs/yarn-hadoop-resourcemanager-server1.soumya.com.out
hadoop@localhost's password:
localhost: starting nodemanager, logging to /home/hadoop/hadoop/logs/yarn-hadoop-nodemanager-server1.soumya.com.out
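
With both scripts finished, the jps tool shipped with the JDK should list all five daemons. A sample of the expected output (PIDs will differ):

[hadoop@server1 sbin]$ jps
3572 NameNode
3689 DataNode
3844 SecondaryNameNode
3987 ResourceManager
4089 NodeManager
4210 Jps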

Step 9:-
Check the Hadoop services from a browser.

NameNode web UI:
http://server1.soumya.com:50070/

To access the information about the cluster and all applications (ResourceManager UI):

http://server1.soumya.com:8088/

To get information about the secondary namenode:
http://server1.soumya.com:50090/


Step 10:- Test Hadoop Single Node Setup

[hadoop@server1 sbin]$ hdfs dfs -mkdir /user
16/09/09 15:36:27 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
[hadoop@server1 sbin]$ hdfs dfs -mkdir /user/soumya
16/09/09 15:36:37 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
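
To go one step further, copy a file into the new directory and list it back (the test file is illustrative); expected output along these lines:

[hadoop@server1 sbin]$ echo "hello hadoop" > /tmp/test.txt
[hadoop@server1 sbin]$ hdfs dfs -put /tmp/test.txt /user/soumya/
[hadoop@server1 sbin]$ hdfs dfs -ls /user/soumya/
Found 1 items
-rw-r--r--   1 hadoop supergroup         13 2016-09-09 15:40 /user/soumya/test.txt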