Tuesday, September 30, 2014

mysql - increment multiple rows

-- renumber every row sequentially, then copy the numbering and reset AUTO_INCREMENT
SET @i=0;
UPDATE PRODUCT SET NO=(@i:=@i+1);
UPDATE PRODUCT SET DOC_NO=NO;
ALTER TABLE PRODUCT AUTO_INCREMENT = 689;
ALTER TABLE erp.REPORT AUTO_INCREMENT = 119;
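
A minimal sketch of running the same renumbering from the shell; the credentials are placeholders, and the ORDER BY ID column is an assumption added only to make the numbering order explicit.

#!/bin/sh
# run the renumbering in one mysql session so the @i user variable persists
# (someUser and the ID column are placeholders)
mysql -u someUser -p erp <<'SQL'
SET @i = 0;
UPDATE PRODUCT SET NO = (@i := @i + 1) ORDER BY ID;
UPDATE PRODUCT SET DOC_NO = NO;
SQL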

Wednesday, September 24, 2014

html-input-onblur

onblur='
  // check whether the typed login ID is already taken
  var s = "tableRead?db=erp&table=USER&fields=LOGIN_ID&format=S&offset=0"
        + "&where=where (LOGIN_ID:`" + this.value.trim() + "`)&callback=?";
  $.getJSON(s, function (data) {
    if (data[0]) {
      $("#loginIDStatus").html("occupied.");
      window.setTimeout(function () {
        $("#newUsereMail").val("").focus();
      }, 0);
    }
  });
'

javabean

http://stackoverflow.com/questions/14307633/servlets-with-javabeans

Thursday, September 18, 2014

linux-cp


Copy multiple files to new names by replacing part of each filename with bash ${file/pattern/replacement} substitution (a dry-run sketch follows the list of examples below):

for file in source* ; do cp "$file" "${file/source/Target}";done

for file in salesSubject* ; do cp "$file" "${file/salesSubject/purchaseSubject}";done


for file in purchaseApply* ; do cp "$file" "${file/purchaseApply/officePurchaseApply}";done

for file in purchaseRequest* ; do cp "$file" "${file/purchaseRequest/officePurchaseRequest}";done


for file in collectionNote* ; do cp "$file" "${file/collectionNote/specialRequestDNote}";done


for file in salesman* ; do cp "$file" "${file/salesman/pjInCharge}";done

for file in paymentTerms* ; do cp "$file" "${file/paymentTerms/paymentTermsFormula}";done


for file in dNotes* ; do cp "$file" "${file/dNotes/returnNote}";done

for file in customerCategory* ; do cp "$file" "${file/customerCategory/supplierCategory}";done

for file in productMaster* ; do cp "$file" "${file/productMaster/internalItem}";done

for file in salesType* ; do cp "$file" "${file/salesType/internalType}";done

for file in purchaseRequest* ; do cp "$file" "${file/purchaseRequest/stockIssue}";done

for file in BOM* ; do cp "$file" "${file/BOM/salesOrderPayment}";done

for file in uom* ; do cp "$file" "${file/uom/dept}";done


for file in BOM* ; do cp "$file" "${file/BOM/purchaseRequest}";done

for file in solutionType* ; do cp "$file" "${file/solutionType/quotationStatus}";done

for file in inBox* ; do cp "$file" "${file/inBox/quotation}";done

for file in category* ; do cp "$file" "${file/category/solutionType}";done

for file in salesType* ; do cp "$file" "${file/salesType/pjTeam}";done

for file in productCategory* ; do cp "$file" "${file/productCategory/TARGET_FILE_NAME}";done

for file in productCategory* ; do cp "$file" "${file/productCategory/maintainType}";done

for file in category* ; do cp "$file" "${file/category/pvia}";done
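
Before running any of these rename loops, a dry run that only prints the commands is a cheap sanity check; this sketch reuses the generic source/Target names from the first example.

# Dry run: print the cp commands that would be executed, without copying anything
for file in source* ; do echo cp "$file" "${file/source/Target}"; done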

linux-ftp

#!/bin/sh
HOST='192.168.0.1'
USER='userName'
PASSWD='userPassword'
FILE='/Folder/SubFolder'   # remote directory to remove recursively

lftp <<END_SCRIPT
open -u $USER,$PASSWD $HOST
rm -r $FILE
quit
END_SCRIPT
exit 0

linux cron backup

lftp -c "open -u userName,userPassword ServerName; rm -r /target/Folder"

++++++++++++++++++++++++++++++++++++++++++++

cat /etc/crontab
# /etc/crontab: system-wide crontab
# Unlike any other crontab you don't have to run the `crontab'
# command to install the new version when you edit this file
# and files in /etc/cron.d. These files also have username fields,
# that none of the other crontabs do.

SHELL=/bin/sh
PATH=/usr/local/sbin:/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin

# m h dom mon dow user  command
17 *    * * *   root    cd / && run-parts --report /etc/cron.hourly
#0,5,10,15,20,25,30,35,40,45,50,55 * * * * root mysqldump -uroot -proot BR201312111529 > /backup/BR201312111529.sq

0 23 * * * root /backup/backupDay TopLevel BR201312111529
0 22 * * * root /backup/backupDay Integrity BR50881022
#0 23 * * 1 root /backup/backupMon
#0 23 * * 2 root /backup/backupTue
#0 23 * * 3 root /backup/backupWed
#0 23 * * 4 root /backup/backupThu
#0 23 * * 5 root /backup/backupFri
#0 23 * * 6 root /backup/backupSat
#0 23 * * 7 root /backup/backupSun
25 6    * * *   root    test -x /usr/sbin/anacron || ( cd / && run-parts --report /etc/cron.daily )
50 23 * * 5 root /backup/backupWeek TopLevel BR201312111529
50 22 * * 5 root /backup/backupWeek Integrity BR50881022
47 6    * * 7   root    test -x /usr/sbin/anacron || ( cd / && run-parts --report /etc/cron.weekly )
52 6    1 * *   root    test -x /usr/sbin/anacron || ( cd / && run-parts --report /etc/cron.monthly )
#
+++++++++++++++++++++++++++++++++++++++++++++++++

cat backupDay
#!/bin/bash
# Daily backup (full dump), rotated by weekday
#d=`date +%y%m%d%H%M%S`
d=`date +%a`   # weekday name (Mon, Tue, ...), used as the remote folder name
s=$1           # customer/site name, passed as the first argument
lftp -c "open -u backup_erp,4r5t6y7u 192.168.20.8/Backup_ERP; rm -r /Backup_ERP/$s/$d"
mysqldump -ususer -pez=TopLevel $2 > /backup/$2.sql
mysqldump -ususer -pez=TopLevel erp > /backup/erp.sql
tar -jvcf /backup/$2.tar /backup/$2.sql
ncftpput -m -u backup_erp -p 4r5t6y7u 192.168.20.8 /Backup_ERP/$s/$d/db_dump /backup/*.*
ncftpput -R -m -u backup_erp -p 4r5t6y7u 192.168.20.8 /Backup_ERP/$s/$d/attachment /erp/$2
#ncftpput -R -m -u backup_erp -p 4r5t6y7u 192.168.20.8 /Backup_ERP/$s/$d/attachment /erp/$2
ncftpput -m -u backup_erp -p 4r5t6y7u 192.168.20.8 /Backup_ERP/$s/$d/code/layout /erp/*.*
ncftpput -m -u backup_erp -p 4r5t6y7u 192.168.20.8 /Backup_ERP/$s/$d/code/css /erp/css/*.*
ncftpput -m -u backup_erp -p 4r5t6y7u 192.168.20.8 /Backup_ERP/$s/$d/code/js /erp/js/*.*
ncftpput -m -u backup_erp -p 4r5t6y7u 192.168.20.8 /Backup_ERP/$s/$d/code/images /erp/images/*.*
ncftpput -m -u backup_erp -p 4r5t6y7u 192.168.20.8 /Backup_ERP/$s/$d/code/fonts /erp/fonts/*.*
ncftpput -R -m -u backup_erp -p 4r5t6y7u 192.168.20.8  /Backup_ERP/$s/$d/code/java /erp/WEB-INF
rm /backup/$2.sql

++++++++++++++++++++++++++++++++++++++++++++

cat backupWeek
#!/bin/bash
# Weekly backup (full dump), kept under a timestamped folder
d=`date +%y%m%d%H%M%S`   # timestamp used as the remote folder name
s=$1                     # customer/site name, passed as the first argument
mysqldump -ususer -pez=TopLevel $2 > /backup/$2.sql
mysqldump -ususer -pez=TopLevel erp > /backup/erp.sql
tar -jvcf /backup/$2.tar /backup/$2.sql
ncftpput -m -u backup_erp -p 4r5t6y7u 192.168.20.8 /Backup_ERP/$s/backup/$d/db_dump /backup/*.*
ncftpput -R -m -u backup_erp -p 4r5t6y7u 192.168.20.8 /Backup_ERP/$s/backup/$d/attachment /erp/$2
ncftpput -m -u backup_erp -p 4r5t6y7u 192.168.20.8 /Backup_ERP/$s/backup/$d/code/layout /erp/*.*
ncftpput -R -m -u backup_erp -p 4r5t6y7u 192.168.20.8 /Backup_ERP/$s/backup/$d/user /erp/user
ncftpput -m -u backup_erp -p 4r5t6y7u 192.168.20.8 /Backup_ERP/$s/backup/$d/code/css /erp/css/*.*
ncftpput -m -u backup_erp -p 4r5t6y7u 192.168.20.8 /Backup_ERP/$s/backup/$d/code/js /erp/js/*.*
ncftpput -m -u backup_erp -p 4r5t6y7u 192.168.20.8 /Backup_ERP/$s/backup/$d/code/images /erp/images/*.*
ncftpput -m -u backup_erp -p 4r5t6y7u 192.168.20.8 /Backup_ERP/$s/backup/$d/code/fonts /erp/fonts/*.*
ncftpput -R -m -u backup_erp -p 4r5t6y7u 192.168.20.8  /Backup_ERP/$s/backup/$d/code/java /erp/WEB-INF




Tuesday, September 16, 2014

linux - dynamically mkdir a dated folder

#!/bin/bash
# Incremental backup: copy only new or changed files into a dated folder
d=`date +%y%m%d%H%M%S`
file="/backup/entities/incremental_backup/$d"
mkdir -p "$file"
DIST=/source/entitles/folder/data/
DIST_OLD=/FullBackup/Entities/Folder/Data/
DIST_UPGRADE=$file
cd $DIST
list=`find . -type f`
for a in $list; do
    if [ ! -f "$DIST_OLD$a" ]; then
        # file is not in the full backup yet, so copy it
        cp -vpf --parents $a $DIST_UPGRADE
        continue
    fi
    diff -a $a $DIST_OLD$a > /dev/null
    if [[ "$?" == "1" ]]; then
        echo copying
        # file exists but differs, so copy the changed file
        cp -vpf --parents $a $DIST_UPGRADE
    fi
done

Sunday, September 14, 2014

mysql-mysqldump

mysqldump --host=ipaddress \
    --user=username \
    --password=password \
    db_name table_name \
    --where="id>2500000"

linux ftp ncftpget


apt-get install ncftp
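
The package also provides ncftpget for downloads; a couple of hedged usage sketches (the host and remote paths follow the backup scripts above, the password is a placeholder):

# fetch a single file into the current directory
ncftpget -u backup_erp -p somePassword 192.168.20.8 . /Backup_ERP/TopLevel/Mon/db_dump/erp.sql

# fetch a remote directory tree recursively into /restore
ncftpget -R -u backup_erp -p somePassword 192.168.20.8 /restore /Backup_ERP/TopLevel/Mon/attachment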



dos ftp

script.txt:

open ftp.domain.com
username
password
cd public_html
dir
get file.txt
bye

or

open ftp.domain.com
username
password
cd public_html
dir
prompt off
mget *.*
bye

ftp -s:script.txt

linux-cron

Linux scheduled jobs are handled by a scheduler system known as cron, which allows system administrators to automate recurring administrative tasks and lets users automate their own routine work.



There are 2 types of scheduled jobs:

1. User Scheduled Jobs - set by a non-root user and run with that user's privileges.
2. System Scheduled Jobs - set by root and run with the superuser's privileges.

The scheduler system includes the following components:
1. The Cron Daemon - crond(8)
2. The configuration files - /etc/crontab(5) and /var/spool/cron/*
3. The command - crontab(1)

The Cron Daemon - crond(8)

The cron daemon is the primary server service for all types of scheduled jobs. Every minute it reads all configuration files, including the system scheduled jobs in '/etc/crontab' and all users' scheduled jobs in '/var/spool/cron/*'. If it finds jobs that need to run at that time, it runs them.

Since the above operation is performed every minute, the smallest unit of time in the configuration is one minute. In other words, you can schedule a job to run every minute, but not every second.

The Configuration files

The configuration file '/etc/crontab' is used for storing system scheduled jobs and can be edited directly by the superuser only.

The configuration files under the directory '/var/spool/cron' are used for storing users' scheduled jobs.
For example :
The scheduled jobs for the user peter are stored in the file /var/spool/cron/peter.
The scheduled jobs for the user ada are stored in the file /var/spool/cron/ada.

However, users can only edit these files through the 'crontab' command, and each user can edit only his or her own file.

The Crontab files

Min  Hour   Day  Month  Weekday  RunAs (/etc/crontab only)  Command(Action)
*    *      *    *      *        root                       command   i.e. every minute
0    *      *    *      *        root                       command   i.e. every hour, on the hour
0    12,13  *    *      *        root                       command   i.e. at 12:00 and 13:00 every day
15   2      1    *      *        root                       command   i.e. the 1st day of every month at 02:15
15   4      */3  *      *        root                       command   i.e. every 3 days at 04:15

Scheduling User CronJobs

To maintain their scheduled jobs, users use the 'crontab' command. A few options are available:

crontab -e   to edit the user's own cronjob     // crontab -u peter -e  <-- root
crontab -l   to list the user's own cronjob     // crontab -u peter -l  <-- root
crontab -r   to remove the user's own cronjob   // crontab -u peter -r  <-- root

e.g. peter edits his own job:

crontab -e
* * * * * date >> /home/peter/datafile
ESC :wq

The cron job file for Peter is created, but Peter cannot view it directly.
Normal users have no right to access the directory '/var/spool/cron'; they can only reach it through the 'crontab' command, which has the SetUID (or SetGID, depending on the distribution) bit configured.
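
To verify this, check the permission bits on the crontab binary (the exact path, and whether it is SetUID or SetGID, varies by distribution):

ls -l $(which crontab)
# an 's' in the owner or group execute position marks the SetUID/SetGID bit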

To view his own cronjob:  crontab -l

# the cron service is named 'cron' on Debian/Ubuntu and 'crond' on Red Hat; adjust accordingly
ps -fC cron(d)
chkconfig --list cron(d)
service cron(d) status
chkconfig --level 2345 cron(d) on
service cron(d) restart

Scheduling System Cronjobs

vi /etc/crontab

SHELL=/bin/bash
PATH=/sbin:/bin:/usr/sbin:/usr/bin
MAILTO=root
HOME=/
01 * * * * root run-parts /etc/cron.hourly
02 4 * * * root run-parts /etc/cron.daily
22 4 * * * root run-parts /etc/cron.weekly
41 1 1 * * root run-parts /etc/cron.monthly

Actually, 'run-parts' is a script that reads and runs the script files in the specified directory (e.g. '/etc/cron.daily').

Note that all entries use different values in the minute column. Otherwise, jobs from different directories would start at the same time, which could seriously affect the performance of the Linux machine during that period.


To configure system cron jobs, you can either:

Create an entry directly in the /etc/crontab file, e.g.

* * * * * root date >> /root/datefile

or

Prepare a script file and put it into the appropriate directory. For example, a script that needs to run monthly goes into the directory '/etc/cron.monthly'.
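
As a minimal sketch, a hypothetical monthly job dropped into that directory (the script name and the dump command are illustrative only):

cat > /etc/cron.monthly/mybackup <<'EOF'
#!/bin/bash
# hypothetical monthly database dump
mysqldump -uroot -proot erp > /backup/erp-monthly.sql
EOF
chmod 755 /etc/cron.monthly/mybackup   # run-parts only runs executable files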