2020-11-26

Linux CLI, SpeedTest

#/opt/script/SPEEDTEST.sh
#10:06 2020.04.07
##############################
#REF: https://williamyaps.github.io/wlmjavascript/servercli.html
# Run speedtest-cli against every server located in $CITY and collect all
# results into one timestamped file under $RESULT_FLD.
now1="$(date +'%Y.%m.%d_%H.%M.%S.%3N')"
CITY="Hanoi"
##############################

#_________[SpeedTest_Script]:BEGIN
RESULT_FLD="/opt/temp"
mkdir -p "$RESULT_FLD"
mkdir -p /opt/script/ && cd /opt/script/ || exit 1
#wget https://raw.githubusercontent.com/sivel/speedtest-cli/master/speedtest.py
#chmod +x speedtest.py
# Dump the full server list once, then extract the numeric server IDs for
# $CITY (awk's first field is "<id>)" -- gsub strips the trailing ")").
/opt/script/speedtest.py --list > "$RESULT_FLD/speedtest-server-list-all.txt"
grep -- "$CITY" "$RESULT_FLD/speedtest-server-list-all.txt" \
  | awk '{gsub(")",""); print $1}' > "$RESULT_FLD/$CITY-isp-full-name.txt"

cd "$RESULT_FLD" || exit 1
# Test every matching server id; -r keeps backslashes, IFS= keeps whitespace.
while IFS= read -r server_id; do
  /opt/script/speedtest.py --server "$server_id"
done < "$RESULT_FLD/$CITY-isp-full-name.txt" > "$RESULT_FLD/$CITY-isp-result-$now1.txt"
#_________[SpeedTest_Script]:END


#______________________DELETE_OLD_DATA:BEGIN
# SERVER_NAME=$(hostname)
# mkdir -p /opt/bk/$SERVER_NAME/
# cd /opt/bk/$SERVER_NAME/
NumberOfFileWantToKeep=3
# Keep only the [$NumberOfFileWantToKeep] newest files/folders in the current
# directory.  The count is passed to awk via -v instead of being hard-coded
# (the old version set the variable but then used a literal 'NR>3').
# NOTE(review): assumes backup names contain no spaces/newlines -- true for
# the files this script generates.
#DELETE FILES:
ls -t -- *.txt 2>/dev/null | awk -v keep="$NumberOfFileWantToKeep" 'NR > keep' | xargs -r /bin/rm -rf --
#DELETE FOLDERS:
ls -td -- */ 2>/dev/null | awk -v keep="$NumberOfFileWantToKeep" 'NR > keep' | xargs -r /bin/rm -rf --
#______________________DELETE_OLD_DATA:END
#THE_END

2020-11-25

Solve “The following signatures were invalid: BADSIG” in Ubuntu

 #15:30 2020.11.25

Solve “The following signatures were invalid: BADSIG” in Ubuntu
In this post, we will discuss another very common problem that occurs when users upgrade or update their system. This error is the “BADSIG” error, which looks something like this:

W: An error occurred during the signature verification. The repository is not updated and the previous index files will be used. GPG error: http://extras.ubuntu.com precise Release: The following signatures were 
invalid: BADSIG 16126D3A3E5C1192 Ubuntu Extras Archive Automatic Signing Key

W: GPG error: http://ppa.launchpad.net precise Release:
The following signatures were invalid: BADSIG 4C1CBC1B69B0E2F4 Launchpad PPA for Jonathan French W: Failed to fetch 


http://extras.ubuntu.com/ubuntu/dists/precise/Release
# Fix for apt "BADSIG" errors: wipe apt's cached package lists and force a
# clean re-download of the repository indexes and their GPG signatures.
sudo apt-get clean
cd /var/lib/apt
# Keep the old (possibly corrupted) lists as a backup instead of deleting them.
sudo mv lists oldlist
# apt requires lists/partial to exist before it can update.
sudo mkdir -p lists/partial
sudo apt-get clean
sudo apt-get update


REF: https://itsfoss.com/solve-badsig-error-quick-tip/

2020-11-19

HaProxy: re-route to other backend based on REGEX

# Route by Host header (-i = case-insensitive match).
acl URL_upload_mgiay_com           hdr(host) -i    upload.mgiay.com

#CURRENT STORAGE: node02
acl URL_file_mgiay_com                  hdr(host) -i    file.mgiay.com



#OLD STORAGE: node01
#_____OLD_STORAGE_[10.0.2.127]:BEGIN
# NOTE: repeating the same acl name ORs the conditions together, so
# URL_file_mgiay_com_s01 is true if ANY url_reg pattern below matches the URL.
#2010-2019:
acl URL_file_mgiay_com_s01 url_reg /(resize|crop)/\d+x\d+/201[0-9]/\d{1,2}/
acl URL_file_mgiay_com_s01 url_reg /201[0-9]/\d{1,2}/

#2020.01-2020.10:
acl URL_file_mgiay_com_s01 url_reg /(resize|crop)/\d+x\d+/2020/(0[1-9]|10)/
acl URL_file_mgiay_com_s01 url_reg /2020/(0[1-9]|10)/

#2020.11.01-2020.11.18:
acl URL_file_mgiay_com_s01 url_reg /(resize|crop)/\d+x\d+/2020/11/(0[1-9]|1[0-8])/
acl URL_file_mgiay_com_s01 url_reg /2020/11/(0[1-9]|1[0-8])/
#_____OLD_STORAGE_[10.0.2.127]:END




# Order matters: HAProxy uses the FIRST use_backend whose condition matches,
# so the old-storage (s01) rule must come before the generic file.mgiay.com rule.
# Two ACL names on one 'if' line are ANDed.
use_backend BE_URL_upload_mgiay_com       if  URL_upload_mgiay_com
use_backend BE_URL_file_mgiay_com_s01     if  URL_file_mgiay_com URL_file_mgiay_com_s01
use_backend BE_URL_file_mgiay_com              if  URL_file_mgiay_com



#_____________________________________________mgiay_STORAGE:BEGIN
# Upload traffic (upload.mgiay.com) -> current storage node02.
backend BE_URL_upload_mgiay_com
    mode http
    balance roundrobin
    #server Storage_127_8081 10.0.2.127:8081 check
    #server Storage_100_8081 2.2.2.100:8081 check backup
    #server 2.2.2.126:8081  2.2.2.126:8081 check
    server  10.0.2.126:8081  10.0.2.126:8081 check

# Old storage (node01): serves files uploaded up to 2020-11-18
# (selected by the url_reg date-pattern ACLs in the frontend).
backend BE_URL_file_mgiay_com_s01
    #acl blacklist src -f /etc/haproxy/blacklist.lst
    #http-request deny if blacklist
    mode http
    #balance source
    balance roundrobin
    #redirect scheme https code 301 if !{ ssl_fc }
    # Cookie-based session stickiness (single server now, kept for future adds).
    cookie BE_URL_file_mgiay_com_s01 insert indirect
    #server Storage_2.92_63202 10.0.2.92:63202 check
    #server Storage_127_64280 10.0.2.127:64280 check backup
    #server Storage_100_64280 10.0.2.100:64280 check backup
    server  10.0.2.127:64280 10.0.2.127:64280 cookie 127_64280 weight 1 check

# Current storage (node02): everything on file.mgiay.com not matched by s01.
backend BE_URL_file_mgiay_com
    # acl blacklist src -f /etc/haproxy/blacklist.lst
    # http-request deny if blacklist
    mode http
    balance roundrobin
    #server Storage_2.92_63202 10.0.2.92:63202 check    
    #server Storage_127_64280 10.0.2.127:64280 check backup 
    #server Storage_100_64280 10.0.2.100:64280 check backup
    cookie BE_URL_file_mgiay_com insert indirect
    server  10.0.2.126:64280 10.0.2.126:64280 cookie 126_64280 weight 1 check
#_____________________________________________mgiay_STORAGE:END

2020-11-12

GNUPLOT: Drawing chart in Linux Terminal

# gnuplot comes in several packages: -nox renders to files only (no GUI),
# -qt / -x11 add interactive display windows.
#sudo apt-get -y install gnuplot-nox
#sudo apt-get -y install gnuplot-qt
#sudo apt-get -y install gnuplot-x11
#sudo apt-get -y install plotutils

# Render the chart from the command file below, timing the run.
time (gnuplot < gpcommands.txt)



#gpcommands.txt
#9:16 2020.11.13
#https://bhoey.com/blog/simple-time-series-graphs-with-gnuplot/
#http://gnuplot.sourceforge.net/docs_4.2/node274.html
#https://alvinalexander.com/technology/gnuplot-charts-graphs-examples/

# gpcommands.txt -- plot "timestamp value" pairs from result.txt as a PNG chart.
set xdata time                          # Indicate that x-axis values are time values

#2020.11.12-17:17:13
#set timefmt "%Y-%m-%d"                 # Indicate the pattern the time values will be in
set timefmt "%Y.%m.%d-%H:%M:%S"         # Indicate the pattern the time values will be in

#set format x "%m/%y"                   # Set how the dates will be displayed on the plot
set format x "%Y.%m.%d-%H:%M:%S"        # Set how the dates will be displayed on the plot
 
#set xrange ["2020.11.01-00:00:00":"2020.11.30-23:59:00"]  # Set x-axis range of values
#set yrange [0:30]                       # Set y-axis range of values
 
set key off                             # Turn off graph legend
set xtics rotate by -90                 # Rotate dates on x-axis 90deg for cleaner display
set title 'REDIS (2.2.2.124:6390) MONITOR CHART RESULT'  # Set graph title
 
set xlabel "Time (2020.11.12-17:17:04 --> 2020.11.13-07:51:37)"
set ylabel "Metric (ms)"
set autoscale y                         # Autoscale both axes ...
set autoscale x
set xrange ["2020.11.12-17:00:00":"2020.11.13-08:00:00"]  # ... then pin x explicitly
set grid
set timestamp                           # Stamp plot-generation time on the chart

#set terminal jpeg                       # Set the output format to jpeg
#set output 'output-2.jpg'               # Set output file to output.jpg

#set terminal png                       # Set the output format to PNG
#set terminal png size 1280, 1024
#set terminal png size 5000, 1024
set terminal png size 3000, 1024
#set terminal png size 1920, 1080
set output "RedisMetricMonitor.png"
 
#plot 'result.txt' using 1:2 with linespoints linetype 55000 linewidth 2
#plot 'result.txt' using 1:2 with lines linetype 55000 linewidth 2
#plot 'result.txt' using 1:2 with filledcurves linetype 55000 linewidth 2
plot 'result.txt' using 1:2 with lines  # column 1 = time, column 2 = metric (ms)



#result.txt
2020.11.12-17:17:04 0.36
2020.11.12-17:17:05 0.35
2020.11.12-17:17:06 0.34
2020.11.12-17:17:07 0.26
2020.11.12-17:17:08 0.41
2020.11.12-17:17:09 0.21
2020.11.12-17:17:10 0.23
2020.11.12-17:17:11 0.23
2020.11.12-17:17:12 0.31
2020.11.12-17:17:13 0.23




2020-11-10

PostgreSQL: Backup/Restore Parallels

#!/bin/bash
#/opt/script/postgres.[db].[backup].[full].sh
#LastUpdate: #15:28 2019.08.08
###########################################
#______________POSTGRESQLDB_BACKUP:BEGIN
#BACKUP POSTGRESQLDB daily (crontab entry):
#10 00 * * * /opt/script/postgres.[db].[backup].[full].sh DB_NAME
#______________POSTGRESQLDB_BACKUP:END
###########################################
# Provides $PG_SQL and $PG_ARCHIVED (archive root; assumed to end with '/' --
# TODO confirm against POSTGRES_VERSION.txt).
source /opt/postgres/POSTGRES_VERSION.txt
#____DBNAME_WANT_TO_BACKUP:BEGIN <<<<<-----
DB_NAME=$1
# First non-loopback IPv4 in the 2.2.2.* range; used to tag backup file names.
LAN_IP=$(ip a s|sed -ne '/127.0.0.1/!{s/^[ \t]*inet[ \t]*\([0-9.]\+\)\/.*$/\1/p}' | grep 2.2.2)
#____DBNAME_WANT_TO_BACKUP:END   <<<<<-----
if [ "$#" -eq "0" ]
  then
    echo "Usage: $0 DB_NAME"
  else
    mkdir -p "$PG_ARCHIVED$DB_NAME"
    chown -R postgres:postgres "$PG_ARCHIVED"
    now1="$(date +'%Y.%m.%d-%H.%M.%S.%3N')"
    # Base name shared by the dump directory, tar.gz, md5 and log files.
    # The [db]/[backup]/... brackets are LITERAL characters in the file names.
    DB_BASE="$LAN_IP.$DB_NAME.[db].[backup].[full].[parallel].[$now1]"
    DB_RESULT="$PG_ARCHIVED$DB_NAME/$DB_BASE"
    LOG="$DB_RESULT.log"
    # ---- Log a snapshot of the server state before the dump starts. ----
    echo "########################################" >> "$LOG"
    echo "BACKUP FULL DB_NAME: [$DB_NAME]"          >> "$LOG"
    echo "-------------------"                      >> "$LOG"
    echo "SERVER INFORMATION:"                      >> "$LOG"
    echo "-------------------"                      >> "$LOG"
    ip a s | grep inet      >> "$LOG"
    echo "----------------------------------------" >> "$LOG"
    hostnamectl status      >> "$LOG"
    echo "----------------------------------------" >> "$LOG"
    df -h                   >> "$LOG"
    echo "----------------------------------------" >> "$LOG"
    timedatectl | grep Time >> "$LOG"
    echo "CurrentTime: $(date)" >> "$LOG"
    echo "----------------------------------------" >> "$LOG"
    echo "CPU STATS: " >> "$LOG"
    top -b -n 1 | head -n 5 >> "$LOG"
    echo "----------------------------------------" >> "$LOG"
    printf "RAM FREE: \n $(free -h)\n"              >> "$LOG"
    echo "----------------------------------------" >> "$LOG"
    echo "CPU INFO: $(grep Xeon /proc/cpuinfo | head -n 1)" >> "$LOG"
    #____BACKUP_SCRIPT_parallel:BEGIN
    # One pg_dump worker per logical CPU; fall back to 1 on non-Xeon hardware
    # (the old inline `grep Xeon | wc -l` silently produced '-j 0' there).
    JOBS=$(grep -c Xeon /proc/cpuinfo)
    [ "$JOBS" -gt 0 ] || JOBS=1
    # Directory-format dump (-Fd) is required for parallel (-j) dumps.
    su - postgres -c "\
    source /opt/postgres/POSTGRES_VERSION.txt;\
    $PG_SQL/bin/pg_dump --host 127.0.0.1 \
    -Fd -j $JOBS \
    --verbose \
    --dbname=$DB_NAME \
    -f $DB_RESULT" >> "$LOG.detail" 2>&1
    #____BACKUP_SCRIPT_parallel:END

    #____COMPRESS_BACKUP_FOLDER:BEGIN
    cd "$PG_ARCHIVED$DB_NAME" || exit 1
    # -z added: the archive is named .tar.gz, so actually gzip-compress it
    # (the old `tar -cf` wrote an UNcompressed tar under a .gz name).
    tar -czf "$DB_BASE.tar.gz" "$DB_BASE"
    /bin/rm -rf "$DB_BASE"
    #____COMPRESS_BACKUP_FOLDER:END
    md5sum "$DB_BASE.tar.gz" > "$DB_BASE.tar.gz.md5.txt"


    echo "########################################" >> "$LOG"
    echo "BACKUP RESULTED:"         >> "$LOG"
    cat  "$DB_BASE.tar.gz.md5.txt"  >> "$LOG"
    ls -lh "$DB_RESULT"*            >> "$LOG"
    echo "BACKUP FULL DB_NAME START: [$DB_NAME]-[$now1]"    >> "$LOG"
    now2="$(date +'%Y.%m.%d-%H.%M.%S.%3N')"
    echo "BACKUP FULL DB_NAME END  : [$DB_NAME]-[$now2]"    >> "$LOG"
    echo "########################################"         >> "$LOG"
    echo "" >> "$LOG"

    #______________________DELETE_OLD_DATA:BEGIN
    cd "$PG_ARCHIVED$DB_NAME" || exit 1
    NumberOfFileWantToKeep=24
    echo "PG_ARCHIVED: [$PG_ARCHIVED]"  >> "$LOG"
    echo "Current Dir: [$(pwd)]"        >> "$LOG"
    echo "NumberOfFileWantToKeep: [$NumberOfFileWantToKeep]" >> "$LOG"
    # Keep only the $NumberOfFileWantToKeep newest backups of this DB; the
    # count is passed via awk -v (the old code hard-coded 'NR>24' instead of
    # using the variable).  Deleted names are appended to the log via tee.
    echo "Truncate Old Data In: [$PG_ARCHIVED]"     >> "$LOG"
    ls -t -- "$LAN_IP.$DB_NAME.[db].[backup].[full]"* 2>/dev/null \
      | awk -v keep="$NumberOfFileWantToKeep" 'NR > keep' \
      | tee -a "$LOG" \
      | xargs -r /bin/rm -rf --
    #______________________DELETE_OLD_DATA:END
    #____FREE_RAM:BEGIN
    # Flush dirty pages and drop the page cache (requires root); only affects
    # cache warmth, never correctness.
    free && sync && echo 3 > /proc/sys/vm/drop_caches && free
    free
    # printf "RAM FREE: \n $(free -h)\n"  >> "$LOG"
    #____FREE_RAM:END
    echo "" >> "$LOG"
    echo "BACKUP JOB DONE AT [$(date +'%Y.%m.%d-%H.%M.%S.%3N')]" >> "$LOG"
    echo "" >> "$LOG"
    chown -R postgres:postgres "$PG_ARCHIVED"
fi

#THE-END
# #BACKUP: OK
# # -Fd: Phai cung version:
# # http://evol-monkey.blogspot.com/2013/10/parallel-pgdump-backups.html
# # I was talking with a friend about this feature and he said: Who does pg_dump now days anyway? Everyone is doing live backups
# Example: parallel directory-format dump (pg_dump -Fd -j N).
# NOTE(review): '-j $(... grep Xeon | wc -l)' yields 0 on non-Xeon CPUs --
# confirm the hardware before reusing.  No comments can go inside the quoted
# su -c string below: it is a single line-continued string literal.
date1=$(date);
su - postgres -c "\
source /opt/postgres/POSTGRES_VERSION.txt;
/opt/postgres/bin/pg_dump \
-h 127.0.0.1 \
-p 5432 \
-Fd \
-j $(cat /proc/cpuinfo | grep Xeon | wc -l) \
--dbname='PostgreSQL_DB_NAME' \
--verbose \
-f /opt/temp/PostgreSQL_DB_NAME_db\
"
#
# Print start/end timestamps to eyeball the dump duration.
date2=$(date); echo $date1; echo $date2;
#
Tue Nov 10 15:08:50 +07 2020
Tue Nov 10 15:09:54 +07 2020


#RESTORE: OK
# Example: parallel restore of the directory-format dump created above.
# The target database must already exist; pg_restore -Fd -j N restores with
# one worker per CPU (same Xeon-count caveat as the dump example).
date1=$(date);
su - postgres -c "\
source /opt/postgres/POSTGRES_VERSION.txt;
/opt/postgres/bin/pg_restore \
-h 127.0.0.1 \
-p 5432 \
-Fd \
-j $(cat /proc/cpuinfo | grep Xeon | wc -l) \
--dbname='PostgreSQL_DB_NAME_beta_20201110' \
--verbose \
'/opt/temp/PostgreSQL_DB_NAME_db' \
"
#
# Print start/end timestamps to eyeball the restore duration.
date2=$(date); echo $date1; echo $date2;
#

#RESTORE TO NEW DB, WITH FULL DATA:
#postgres-connect-dba.sh
#CREATE DATABASE "PostgreSQL_DB_NAME";
#\q
#su - postgres -c 'pg_restore --dbname="PostgreSQL_DB_NAME" --verbose /db/bk/PostgreSQL_DB_NAME_dev.full-2016.12.05-16.27.13.293.tar'
# #DROP OLD DB:
# postgres=# select pg_terminate_backend(pid) from pg_stat_activity where datname='PostgreSQL_DB_NAME';
# postgres=# drop database "PostgreSQL_DB_NAME";
# #CREATE NEW DB NAME:
# postgres=# CREATE DATABASE "PostgreSQL_DB_NAME";
# postgres=# \q
# #RESTORE OLD DBNAME TO NEW DB NAME, WITH FULL DATA:
# #11:46 2019.01.13
# ##################
# #Souce: 
# https://www.postgresql.org/docs/10/app-pgdump.html
# https://www.postgresql.org/docs/10/app-pgrestore.html

# ###########
# #THE-END###
# ###########

2020-10-25

PostgreSQL: Check config file loaded on runtime

1/ Current config loaded to memory at runtime
-- pg_file_settings shows every setting as READ FROM the config files on disk
-- (not necessarily applied yet -- check the applied/error columns).
-- 'TABLE x;' is PostgreSQL shorthand for 'SELECT * FROM x;'.
TABLE pg_file_settings;
SELECT * FROM pg_catalog.pg_file_settings order by name asc;
SELECT * FROM pg_catalog.pg_file_settings order by sourceline asc;



































postgres=# SELECT count (*) FROM pg_catalog.pg_file_settings;
 count 
-------
    77
(1 row)



2/ Ref:
SELECT pg_read_file('pg_hba.conf');
select pg_reload_conf();

2020-10-24

PatroniCluster 3 Node: [postgresql.base.conf]

#/opt/postgres/pgdata/postgresql.base.conf
#LastUpdate: #18:13 2020.10.22
#cat /opt/postgres/pgdata/postgresql.base.conf | egrep "LastUpdate|#10.0.2.*"
################################################################
#scp /opt/postgres/pgdata/postgresql.base.conf root@srv112:/opt/postgres/pgdata/postgresql.base.conf; scp /opt/postgres/pgdata/postgresql.base.conf root@srv113:/opt/postgres/pgdata/postgresql.base.conf
################################################################
#export PATH=$PATH:/opt/postgres/pgscript; postgres-check-running-config.sh
#postgres=# TABLE pg_file_settings;
#10.0.2.111*
#10.0.2.112*
#10.0.2.113*

################################################################
checkpoint_completion_target = '0.9'
cluster_name = 'postgres'
datestyle = 'iso, mdy'
default_statistics_target = '500'
default_text_search_config = 'pg_catalog.english'
dynamic_shared_memory_type = 'posix'
effective_cache_size = '12GB'
effective_io_concurrency = '200'
hot_standby = 'on'
lc_messages = 'en_US.UTF-8'
lc_monetary = 'en_US.UTF-8'
lc_numeric = 'en_US.UTF-8'
lc_time = 'en_US.UTF-8'
listen_addresses = '0.0.0.0'
log_destination = 'syslog'
log_directory = '/opt/postgres/pglog'
log_filename = 'postgresql-%Y-%m-%d_%H%M%S.log'
log_line_prefix = '%t user=%u database=%d client_ip=%h '
log_min_duration_statement = '0'
log_rotation_age = '1d'
log_rotation_size = '100MB'
log_statement = 'ddl'
log_timezone = 'Asia/Saigon'
log_truncate_on_rotation = 'True'
logging_collector = 'True'

maintenance_work_mem = '2GB'
max_connections = '1000'
max_locks_per_transaction = '64'
max_parallel_workers = '8'
max_parallel_workers_per_gather = '4'
max_prepared_transactions = '0'
max_replication_slots = '10'
max_wal_senders = '10'
max_wal_size = '8GB'
max_worker_processes = '8'

min_wal_size = '4GB'
port = '5432'
random_page_cost = '1.1'
shared_buffers = '4GB'
timezone = 'Asia/Saigon'
track_commit_timestamp = 'off'
unix_socket_directories = '.'
wal_buffers = '16MB'
wal_keep_segments = '8'
wal_level = 'replica'
wal_log_hints = 'on'
work_mem = '524kB'
hba_file = '/opt/postgres/pgdata/pg_hba.conf'
ident_file = '/opt/postgres/pgdata/pg_ident.conf'

# Extension settings (take effect only with the shared_preload_libraries below).
pg_stat_statements.max = 10000
pg_stat_statements.track = all
shared_preload_libraries = 'pg_stat_statements, pgaudit'
track_activity_query_size = 2048
# NOTE(review): duplicate of the pg_stat_statements.track line above -- the
# last occurrence wins in postgresql.conf, so this line is redundant.
pg_stat_statements.track = all
#THE_END