# Check SSH connection activities by reading its log (Redhat / Centos)
tail -f -n 50 /var/log/secure | grep sshd

# Set Linux OS (with SystemD) to run without GUI
sudo systemctl set-default multi-user.target
# sudo reboot
   ┌─────────┬───────────────────┐
   │Runlevel │ Target            │
   ├─────────┼───────────────────┤
   │0        │ poweroff.target   │
   ├─────────┼───────────────────┤
   │1        │ rescue.target     │
   ├─────────┼───────────────────┤
   │2, 3, 4  │ multi-user.target │
   ├─────────┼───────────────────┤
   │5        │ graphical.target  │
   ├─────────┼───────────────────┤
   │6        │ reboot.target     │
   └─────────┴───────────────────┘
# Reverse previous
sudo systemctl set-default graphical.target
# sudo reboot

# Performing NSlookup
record=kimconnect.com
dig @8.8.8.8 +short $record

# Create dump from WEB01
mysqldump -uroot -p\!PASSWORD\! --databases kimconnect | gzip > /tmp/kimconnect.gz
chmod 777 /tmp/kimconnect.gz

# Pull kimconnect from WEB01 while login WEB02
scp -P{port-number} webadmin@web01:/tmp/kimconnect.gz /home/webadmin/pkg/share/httpd/htdocs

# Import DB
nohup gunzip < kimconnect.gz | mysql -uwebadmin -pPASSWORD --socket=/home/webadmin/pkg/var/run/mysqld/mysqld.sock &

# Put files to NAS
cd /home/webadmin/
#sftp -oStrictHostKeyChecking=no webadmin@nas:/mnt/array1/operations/BackUps/kimconnect
sftp -oStrictHostKeyChecking=no webadmin@nas:/mnt/array1/operations/BackUps/kimconnect
put kimconnect_baseline.gz
exit

# Pull file from NAS
cd /home/webadmin/
#sftp -oStrictHostKeyChecking=no webadmin@web01:/mnt/array1/operations/BackUps/kimconnect_CLONES
sftp -oStrictHostKeyChecking=no webadmin@nas:/mnt/array1/operations/BackUps/kimconnect_CLONES
get pkgsrc_baseline.gz
exit

# Compress folder and save in /tmp directory
tar -C /directory/of/folder -czvf /tmp/folder-name.gz folder-name
chmod 777 /tmp/folder-name.gz

# Run mysql command from shell
mysql -uwebadmin -pPASSWORD --execute="{command-here};"

# Push files to Remote while login to Local with non-standard port (remove option 'z' for no compression)
rsync -trvz -e 'ssh -p {port-number}' --progress /local/folder webadmin@{remote-ip}:{/remote/folder}

# Pull files from Remote to Local with non-standard port (remove option 'z' for no compression)
rsync -chavzP -e 'ssh -p {port-number}' webadmin@{remote-ip}:/path/to/copy /local/path

# Show memory & cpu:
cat /proc/meminfo
cat /proc/cpuinfo

# Show linux version:
uname -a

# Show history of commands:
history

# Run as admin user and then sudo to root shell
su - %admin-user%
sudo -s

# Check to see who you are:
whoami

# Shell symbols:
# = root
$ = non-root user

# Network:
ip addr
ip link

# Install CentOS7 networking tools to run ifconfig:
yum install net-tools
ifconfig -a

# networking
dhclient -r  #release current dhcp lease (all interfaces)
dhclient  #request a new dhcp lease
ifconfig #show interface configuration (use 'systemctl restart network' or /etc/init.d/network restart to restart networking)
ifup eth0 #bring an interface up
ifdown eth0 #take an interface down

# shutdown
shutdown now

# Firewalld
firewall-cmd --permanent --direct --add-rule ipv4 filter INPUT 0 -p icmp -s X.X.X.X -j ACCEPT

# How to terminate processes:
ps -A #list all running processes
ps aux | grep [APPNAME]
pidof [APPNAME] #find PID of an app by name
kill [PID]
kill -9 [PID] #forcefully kill PID
killall [APPNAME] #terminate all related processes
pkill [APPNAME] #alternative kill process command

# Searching
find /home/webadmin/webserver -name "*.log"

# Ports and Daemons:
netstat -tlnp

# Add text at end of file
cat << EOF >> /path/to/filename
line 1
line 2
EOF

# Check OS
uname -a
cat /etc/redhat-release #specific to Redhat/CentOS

# check IP
ifconfig
ip addr
ip addr show eth0

# Check services running on ports
sudo netstat -tulpn

# Check service by name
service mysql status        #old command
systemctl status mysql        #systemd (newer command)
ps aux | grep mysql
pgrep mysql

# View services by cpu/mem consumption
top
htop

# Disk free
df -ah         #all human readable

# Size of a directory
du -sh /usr    #disk usage size human readable

# How to mount
mount     #check existing
mount /dev/sda2 /mnt    #mount sda2 to /mnt

# check disk partitions
cat /etc/fstab

# View disks and mount points
sudo fdisk -l

# Insert line to file after matching pattern
sed -i '/PATTERN-TO-MATCH/a NEW-LINE' /path/to/file

# Check OS Versions
[webadmin@server01 ~]$ lsb_release -a
LSB Version:    :core-4.0-ia32:core-4.0-noarch
Distributor ID:    Fedora
Description:    Fedora release 14 (Laughlin)
Release:    14
Codename:    Laughlin

[webadmin@server02 ~]$ cat /proc/version
Linux version 2.6.32-028stab118.1 (root@kbuild-rh5-x64) (gcc version 4.1.2 20080704 (Red Hat 4.1.2-46)) #1 SMP Thu Feb 12 16:43:22 MSK 2015

[webadmin@server03]$ cat /etc/redhat-release
CentOS release 6.5 (Final)

#List all running processes:
ps axu |more

#List ssh processes, hung sessions:
ps aux | grep ssh

#list active processes:
top

#Check to see who else is logged on:
 who


#Find an application:
which [appname]

#Find out who owns a file or folder:
stat /path-to-file-or-folder

#Change access permissions:
chown [user]:[group] [path-to-file-or-folder]
chmod [666/755/775/777] [path-to-file-or-folder]

#check disk space:
df

# to display name of server
hostname


#to display the domain name where server resides
domainname    #shows the NIS/YP domain; use dnsdomainname for the DNS domain


#Clear sendmail dir:
rm -rf /var/spool/mqueue/*

# Testing email
yum install sendmail
service sendmail start
echo "Subject: test" | sendmail -v [email protected]

#Commands to backup database:
/opt/lampp/bin/mysqldump -u root -p kimconnect > /home/kimconnect.sql
PASSWORD
/opt/lampp/bin/mysqldump -u kimconnect -p gallery2 > /home/gallery2.sql
PASSWORD

#database files location:
/opt/lampp/var/mysql/gallery2

# Backup Pertinent files:
cp -r /opt/lampp/htdocs/kimconnect /home/kimconnect
cp -r /opt/lampp/gallery2 /home/gallery2

#Use GZip:
cd /home
gzip kimconnect.sql gallery2.sql
gzip -r kimconnect gallery2
cat kimconnect.sql.gz gallery2.sql.gz > backup.gz    #gzip members can be concatenated; gunzip decompresses them as one stream

#GunZip decompress:
gunzip backup.gz
gunzip -r kimconnect
gunzip -r gallery2


#TAR: create
tar cvzf backup.tar.gz kimconnect gallery2 gallery2.sql.gz kimconnect.sql.gz

#UNTAR: extract
tar xvzf backup.tar.gz

#Most often you find Tar and Gzip used in concert to create "gzipped archives" with .tar.gz extensions (or its abbreviated form, .tgz). While you can obviously use the commands separately, tar's -z option feeds the archive through gzip after packing and before unpacking. Thus:

% tar -czvf archive.tar.gz file1 file2 dir/ #Creates a gzipped archive, archive.tar.gz

% tar -xzvf archive.tar.gz #Extracts all files from the gzipped archive and,

% tar -tzvf archive.tar.gz #Lists the contents of the gzipped archive without extracting them. (You can also have tar use other compression tools such as bzip2 [-j] and compress [-Z])
#-------------------------------------------------------

#CHOWN:
chown -hR nobody /opt/lampp/htdocs

#-------------------------------------------------------

#COPY:
cp -rf /home /media/KT_EXTERNAL/website

#-------------------------------------------------------

tcpdump not tcp port 22

#-------------------------------------------------------

#Shutdown:
shutdown -h now         #halt the system immediately
shutdown -r now         #reboot immediately

#Set the clock:
hwclock --set --date='09/27/07 21:08:40' --utc
hwclock --hctosys

Other common commands:

– pwd – show current directory
– cd – change current directory
– ls – list directory contents
– chmod – change file permissions
– chown – change file ownership
– cp – copy files
– mv – move files
– rm – remove files
– mkdir – make directory
– rmdir – remove directory
– cat – dump file contents
– less – progressively dump file
– vi – edit file (complex)
– nano – edit file (simple)
– head – trim dump to top
– tail – trim dump to bottom
 
– echo – print/dump value
– env – dump environment variables
– export – set environment variable
– history – dump command history
– grep – search dump for strings
– man – get help on command
– apropos – show list of man pages
– find – search for files
– tar – create/extract file archives
– gzip – compress a file
– gunzip – decompress a file
– du – show disk usage
– df – show disk free space
– mount – mount disks
– tee – write dump to file in parallel
– hexdump – readable binary dumps