// Express 3-style configuration for the parent app.
// NOTE(review): app.router is mounted BEFORE the body/cookie/session
// middleware below, so in Express 3 those middlewares will NOT run for
// routes registered on this router — presumably intentional so they only
// affect the mounted 'dashboard' sub-app; confirm against Express 3
// middleware-ordering semantics.
app.configure(function(){
app.use(app.router);
// the following middlewares are only necessary for the mounted 'dashboard' app,
// but express needs it on the parent app (?) and it therefore pollutes the api
app.use(express.bodyParser());
app.use(express.methodOverride());
// SECURITY(review): cookie-signing and session secrets are hardcoded in
// source control; load them from the environment or a config file instead.
app.use(express.cookieParser('Z5V45V6B5U56B7J5N67J5VTH345GC4G5V4'));
app.use(express.cookieSession({
key: 'uptime',
secret: 'FZ5HEE5YHD3E566756234C45BY4DSFZ4',
proxy: true, // trust the reverse proxy when deciding if the connection is secure
cookie: { maxAge: 60 * 60 * 1000 } // 1-hour session cookie
}));
// CORS headers applied to every response.
app.use(function (req, res, next) {
// Website you wish to allow to connect
res.setHeader('Access-Control-Allow-Origin', '*');
// Request methods you wish to allow
res.setHeader('Access-Control-Allow-Methods', 'GET, POST, OPTIONS, PUT, PATCH, DELETE');
// Request headers you wish to allow
res.setHeader('Access-Control-Allow-Headers', 'X-Requested-With,content-type');
// Set to true if you need the website to include cookies in the requests sent
// to the API (e.g. in case you use sessions)
// NOTE(review): browsers reject credentialed requests when the allowed
// origin is the wildcard '*'; echo a specific origin if cookies are needed.
res.setHeader('Access-Control-Allow-Credentials', true);
// Pass to next layer of middleware
next();
});
// Shared poller registry, read by the rest of the app via app.get(...).
app.set('pollerCollection', new PollerCollection());
});
Saturday, December 5, 2015
node.js app enable cors
Wednesday, November 25, 2015
systemctl daemon-reload
562 systemctl status elasticsearch.service
563 systemctl daemon-reload
564 systemctl status elasticsearch.service
565 vim /etc/systemd/system/multi-user.target.wants/elasticsearch.service
566 systemctl daemon-reload
567 systemctl status elasticsearch.service
568 service elasticsearch start
569 history
570 systemctl disable elasticsearch
571 systemctl enable elasticsearch
572 systemctl status elasticsearch.service
573 less /usr/lib/systemd/system/elasticsearch.service
574 vim /etc/systemd/system/multi-user.target.wants/elasticsearch.service
575 vim /etc/init.d/elasticsearch
576 systemctl status elasticsearch.service
577 systemctl daemon-reload
578 systemctl status elasticsearch.service
579 vim /etc/init.d/elasticsearch
580 systemctl status elasticsearch.service
581 systemctl daemon-reload
582 systemctl status elasticsearch.service
583 history
584 systemctl disable elasticsearch
585 systemctl enable elasticsearch
586 systemctl status elasticsearch.service
587 ps -ef | grep el
588 service elasticsearch start
589 service elasticsearch status
590 ps -ef | grep el
563 systemctl daemon-reload
564 systemctl status elasticsearch.service
565 vim /etc/systemd/system/multi-user.target.wants/elasticsearch.service
566 systemctl daemon-reload
567 systemctl status elasticsearch.service
568 service elasticsearch start
569 history
570 systemctl disable elasticsearch
571 systemctl enable elasticsearch
572 systemctl status elasticsearch.service
573 less /usr/lib/systemd/system/elasticsearch.service
574 vim /etc/systemd/system/multi-user.target.wants/elasticsearch.service
575 vim /etc/init.d/elasticsearch
576 systemctl status elasticsearch.service
577 systemctl daemon-reload
578 systemctl status elasticsearch.service
579 vim /etc/init.d/elasticsearch
580 systemctl status elasticsearch.service
581 systemctl daemon-reload
582 systemctl status elasticsearch.service
583 history
584 systemctl disable elasticsearch
585 systemctl enable elasticsearch
586 systemctl status elasticsearch.service
587 ps -ef | grep el
588 service elasticsearch start
589 service elasticsearch status
590 ps -ef | grep el
Monday, November 16, 2015
hadoop 2.7.1 single node cluster
root@hadoop-master:/myhadoop# export JAVA_HOME=/usr/lib/jvm/java-8-oracle/
root@hadoop-master:/myhadoop# export HADOOP_PREFIX=/myhadoop/hadoop-2.7.1
root@hadoop-master:/myhadoop# export HADOOP_CONF_DIR=$HADOOP_PREFIX/etc/hadoop
root@hadoop-master:~# vim /myhadoop/hadoop-2.7.1/etc/hadoop/hadoop-env.sh
If you want to change it globally and at system level;
root@hadoop-master:~# vim /etc/environment
root@hadoop-master:/myhadoop# export HADOOP_PREFIX=/myhadoop/hadoop-2.7.1
root@hadoop-master:/myhadoop# export HADOOP_CONF_DIR=$HADOOP_PREFIX/etc/hadoop
root@hadoop-master:~# vim /myhadoop/hadoop-2.7.1/etc/hadoop/hadoop-env.sh
#export JAVA_HOME=${JAVA_HOME}
export JAVA_HOME=/usr/lib/jvm/java-8-oracle/
If you want to change it globally and at system level;
root@hadoop-master:~# vim /etc/environment
PATH="/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/usr/local/games"
export JAVA_HOME=/usr/lib/jvm/java-8-oracle/
Thursday, November 12, 2015
hadoop on ubuntu with java 8
1, install java 8
sudo add-apt-repository ppa:webupd8team/java
sudo apt-get update
sudo apt-get install oracle-java8-installer
sudo add-apt-repository ppa:webupd8team/java
sudo apt-get update
sudo apt-get install oracle-java8-installer
2, export java
export JAVA_HOME=/usr/lib/jvm/java-8-oracle/
3, download hadoop
wget http://apache.arvixe.com/hadoop/common/stable/hadoop-2.7.1.tar.gz
4, copy xml file
cp hadoop-2.7.1/etc/hadoop/*.xml input/
5, run hadoop
hadoop-2.7.1/bin/hadoop jar hadoop-2.7.1/share/hadoop/mapreduce/hadoop-mapreduce-examples-2.7.1.jar grep input output 'dfs[a-z.]+'
Wednesday, September 23, 2015
docker rabbitmq
root@sam-clm2:~# docker exec 7931dc7e8611 /usr/sbin/rabbitmq-plugins enable rabbitmq_management
The following plugins have been enabled:
mochiweb
webmachine
rabbitmq_web_dispatch
amqp_client
rabbitmq_management_agent
rabbitmq_management
Applying plugin configuration to rabbit@ms-rabbit... started 6 plugins.
root@sam-clm2:~#
wget http://guest:guest@localhost:15672/cli/rabbitmqadmin
mv rabbitmqadmin.2 /usr/local/bin/rabbitmqadmin
chmod +x /usr/local/bin/rabbitmqadmin
rabbitmqadmin list exchanges
root@sam-clm2:~# rabbitmqadmin declare exchange name=exchange_Office type=direct
exchange declared
root@sam-clm2:~# rabbitmqadmin declare exchange name=exchange_PE type=direct
exchange declared
root@sam-clm2:~# rabbitmqadmin declare exchange name=exchange_JS type=direct
exchange declared
root@sam-clm2:~# rabbitmqadmin declare exchange name=exchange_ALL type=direct
exchange declared
root@sam-clm2:~# rabbitmqadmin declare exchange name=exchange_PDF type=direct
root@sam-clm2:~# docker exec 7931dc7e8611 /usr/sbin/rabbitmq-plugins list
Configured: E = explicitly enabled; e = implicitly enabled
| Status: * = running on rabbit@ms-rabbit
|/
[e*] amqp_client 3.5.4
[ ] cowboy 0.5.0-rmq3.5.4-git4b93c2d
[e*] mochiweb 2.7.0-rmq3.5.4-git680dba8
[ ] rabbitmq_amqp1_0 3.5.4
[ ] rabbitmq_auth_backend_ldap 3.5.4
[ ] rabbitmq_auth_mechanism_ssl 3.5.4
[ ] rabbitmq_consistent_hash_exchange 3.5.4
[ ] rabbitmq_federation 3.5.4
[ ] rabbitmq_federation_management 3.5.4
[E*] rabbitmq_management 3.5.4
[e*] rabbitmq_management_agent 3.5.4
[ ] rabbitmq_management_visualiser 3.5.4
[ ] rabbitmq_mqtt 3.5.4
[ ] rabbitmq_shovel 3.5.4
[ ] rabbitmq_shovel_management 3.5.4
[ ] rabbitmq_stomp 3.5.4
[ ] rabbitmq_test 3.5.4
[ ] rabbitmq_tracing 3.5.4
[e*] rabbitmq_web_dispatch 3.5.4
[ ] rabbitmq_web_stomp 3.5.4
[ ] rabbitmq_web_stomp_examples 3.5.4
[ ] sockjs 0.3.4-rmq3.5.4-git3132eb9
[e*] webmachine 1.10.3-rmq3.5.4-gite9359c7
ang@sam-clm2:~$ rabbitmqadmin list exchanges
+--------------------+---------+
| name | type |
+--------------------+---------+
| | direct |
| amq.direct | direct |
| amq.fanout | fanout |
| amq.headers | headers |
| amq.match | headers |
| amq.rabbitmq.log | topic |
| amq.rabbitmq.trace | topic |
| amq.topic | topic |
| exchange_ALL | direct |
| exchange_JS | direct |
| exchange_Office | direct |
| exchange_PDF | direct |
| exchange_PE | direct |
+--------------------+---------+
ang@sam-clm2:~$ rabbitmqadmin list queues
+------------------+----------+
| name | messages |
+------------------+----------+
| queue_ALL_new | 0 |
| queue_JS_new | 0 |
| queue_Office_new | 0 |
| queue_PDF_new | 0 |
| queue_PE_new | 0 |
+------------------+----------+
The following plugins have been enabled:
mochiweb
webmachine
rabbitmq_web_dispatch
amqp_client
rabbitmq_management_agent
rabbitmq_management
Applying plugin configuration to rabbit@ms-rabbit... started 6 plugins.
root@sam-clm2:~#
wget http://guest:guest@localhost:15672/cli/rabbitmqadmin
mv rabbitmqadmin.2 /usr/local/bin/rabbitmqadmin
chmod +x /usr/local/bin/rabbitmqadmin
rabbitmqadmin list exchanges
root@sam-clm2:~# rabbitmqadmin declare exchange name=exchange_Office type=direct
exchange declared
root@sam-clm2:~# rabbitmqadmin declare exchange name=exchange_PE type=direct
exchange declared
root@sam-clm2:~# rabbitmqadmin declare exchange name=exchange_JS type=direct
exchange declared
root@sam-clm2:~# rabbitmqadmin declare exchange name=exchange_ALL type=direct
exchange declared
root@sam-clm2:~# rabbitmqadmin declare exchange name=exchange_PDF type=direct
root@sam-clm2:~# docker exec 7931dc7e8611 /usr/sbin/rabbitmq-plugins list
Configured: E = explicitly enabled; e = implicitly enabled
| Status: * = running on rabbit@ms-rabbit
|/
[e*] amqp_client 3.5.4
[ ] cowboy 0.5.0-rmq3.5.4-git4b93c2d
[e*] mochiweb 2.7.0-rmq3.5.4-git680dba8
[ ] rabbitmq_amqp1_0 3.5.4
[ ] rabbitmq_auth_backend_ldap 3.5.4
[ ] rabbitmq_auth_mechanism_ssl 3.5.4
[ ] rabbitmq_consistent_hash_exchange 3.5.4
[ ] rabbitmq_federation 3.5.4
[ ] rabbitmq_federation_management 3.5.4
[E*] rabbitmq_management 3.5.4
[e*] rabbitmq_management_agent 3.5.4
[ ] rabbitmq_management_visualiser 3.5.4
[ ] rabbitmq_mqtt 3.5.4
[ ] rabbitmq_shovel 3.5.4
[ ] rabbitmq_shovel_management 3.5.4
[ ] rabbitmq_stomp 3.5.4
[ ] rabbitmq_test 3.5.4
[ ] rabbitmq_tracing 3.5.4
[e*] rabbitmq_web_dispatch 3.5.4
[ ] rabbitmq_web_stomp 3.5.4
[ ] rabbitmq_web_stomp_examples 3.5.4
[ ] sockjs 0.3.4-rmq3.5.4-git3132eb9
[e*] webmachine 1.10.3-rmq3.5.4-gite9359c7
ang@sam-clm2:~$ rabbitmqadmin list exchanges
+--------------------+---------+
| name | type |
+--------------------+---------+
| | direct |
| amq.direct | direct |
| amq.fanout | fanout |
| amq.headers | headers |
| amq.match | headers |
| amq.rabbitmq.log | topic |
| amq.rabbitmq.trace | topic |
| amq.topic | topic |
| exchange_ALL | direct |
| exchange_JS | direct |
| exchange_Office | direct |
| exchange_PDF | direct |
| exchange_PE | direct |
+--------------------+---------+
ang@sam-clm2:~$ rabbitmqadmin list queues
+------------------+----------+
| name | messages |
+------------------+----------+
| queue_ALL_new | 0 |
| queue_JS_new | 0 |
| queue_Office_new | 0 |
| queue_PDF_new | 0 |
| queue_PE_new | 0 |
+------------------+----------+
install rabbit-mq in ubuntu
1, vim /etc/apt/sources.list
Add the following line to your /etc/apt/sources.list:
2, wget https://www.rabbitmq.com/rabbitmq-signing-key-public.asc
3, apt-key add rabbitmq-signing-key-public.asc
4, apt-get update
5, apt-get install rabbitmq-server
6, apt-get -f install
61 rabbitmqadmin list exchanges
62 rabbitmqadmin list exchanges -u guest -p guest
63 rabbitmq-plugins list
64 rabbitmq-plugins enable rabbitmq_management
65 rabbitmq-plugins list
66 rabbitmqadmin list exchanges -u guest -p guest
67 rabbitmqadmin list exchanges
68 rabbitmqadmin list queues
69 history
Add the following line to your /etc/apt/sources.list:
deb http://www.rabbitmq.com/debian/ testing main
2, wget https://www.rabbitmq.com/rabbitmq-signing-key-public.asc
3, apt-key add rabbitmq-signing-key-public.asc
4, apt-get update
5, apt-get install rabbitmq-server
6, apt-get -f install
61 rabbitmqadmin list exchanges
62 rabbitmqadmin list exchanges -u guest -p guest
63 rabbitmq-plugins list
64 rabbitmq-plugins enable rabbitmq_management
65 rabbitmq-plugins list
66 rabbitmqadmin list exchanges -u guest -p guest
67 rabbitmqadmin list exchanges
68 rabbitmqadmin list queues
69 history
Tuesday, August 11, 2015
install qpid on ubuntu 14
# install qpid
apt-get install qpidd qpid-doc qpid-client python-qpid-extras-qmf python-qpid python-cqpid
wget http://apache.mirrors.tds.net/qpid/0.32/qpid-tools-0.32.tar.gz
tar zxvf qpid-tools-0.32.tar.gz
cd qpid-tools-0.32/
./setup.py build
./setup.py install
apt-get install qpidd qpid-doc qpid-client python-qpid-extras-qmf python-qpid python-cqpid
wget http://apache.mirrors.tds.net/qpid/0.32/qpid-tools-0.32.tar.gz
tar zxvf qpid-tools-0.32.tar.gz
cd qpid-tools-0.32/
./setup.py build
./setup.py install
sub-process /usr/bin/dpkg returned an error code (1) ubuntu apache2 (OpsWorks)
When do
apt-get install apache2
Solution: do these first:
apt-get remove --purge apache2
apt-get install -f
apt-get autoremove
may need if you remove apache2 got same issue:
umount /var/log/apache2
Then:
apt-get install apache2
apt-get install apache2
got error:
sub-process /usr/bin/dpkg returned an error code (1) ubuntu apache2. Solution: do these first:
apt-get remove --purge apache2
apt-get install -f
apt-get autoremove
umount /var/log/apache2
umount /var/www
apt-get install apache2
Friday, August 7, 2015
ES AWS
updatedb
apt-get update
apt-get upgrade
add-apt-repository ppa:webupd8team/java
apt-get update
wget https://download.elastic.co/elasticsearch/elasticsearch/elasticsearch-1.7.1.deb
dpkg -i elasticsearch-1.7.1.deb
apt-get install oracle-java8-installer
cd /usr/share/elasticsearch
bin/plugin install elasticsearch/elasticsearch-cloud-aws/2.7.0
cat /dev/null > /etc/elasticsearch/elasticsearch.yml
vim /etc/elasticsearch/elasticsearch.yml
/etc/init.d/elasticsearch start
/etc/init.d/elasticsearch status
apt-get update
apt-get upgrade
add-apt-repository ppa:webupd8team/java
apt-get update
wget https://download.elastic.co/elasticsearch/elasticsearch/elasticsearch-1.7.1.deb
dpkg -i elasticsearch-1.7.1.deb
apt-get install oracle-java8-installer
cd /usr/share/elasticsearch
bin/plugin install elasticsearch/elasticsearch-cloud-aws/2.7.0
cat /dev/null > /etc/elasticsearch/elasticsearch.yml
vim /etc/elasticsearch/elasticsearch.yml
/etc/init.d/elasticsearch start
/etc/init.d/elasticsearch status
Monday, July 27, 2015
Friday, July 10, 2015
enable https (comodo cert) for apache2 & ubuntu 14
1, make xxxxxxxxxxxx.ca-bundle file
cat AddTrustExternalCARoot.crt COMODORSAAddTrustCA.crt COMODORSADomainValidationSecureServerCA.crt yourdomain_com.crt > yourdomain_com.ca-bundle
cat AddTrustExternalCARoot.crt COMODORSAAddTrustCA.crt COMODORSADomainValidationSecureServerCA.crt yourdomain_com.crt > yourdomain_com.ca-bundle
2, create cert folder under /etc/apache2/cert
3, copy all file from comodo to this folder
root@cnew:/etc/apache2/cert# ll
total 36
drwxr-xr-x 2 root root 4096 Jul 10 23:15 ./
drwxr-xr-x 9 root root 4096 Jul 10 21:34 ../
-rw-r--r-- 1 root root 1521 Jun 28 07:31 AddTrustExternalCARoot.crt
-rw-r--r-- 1 root root 7515 Jul 10 23:15 yourdomain_com.ca-bundle
-rw-r--r-- 1 root root 1891 Jun 28 07:31 yourdomain_com.crt
-rw-r--r-- 1 root root 1675 Jun 28 07:31 yourdomain.key
-rw-r--r-- 1 root root 1952 Jun 28 07:31 COMODORSAAddTrustCA.crt
-rw-r--r-- 1 root root 2151 Jun 28 07:31 COMODORSADomainValidationSecureServerCA.crt
4: vim /etc/apache2/sites-available/default-ssl.conf
SSLCertificateFile /etc/apache2/cert/yourdomain_com.crt
SSLCertificateKeyFile /etc/apache2/cert/yourdomain_com.key
SSLCertificateChainFile /etc/apache2/cert/yourdomain_com.ca-bundle
5: enable ssl mod and ssl site
a2enmod ssl
a2ensite default-ssl.conf
service apache2 restart
Monday, July 6, 2015
sqlr-listener error:
ERROR:
sqlr-listener error:
Unable to create a shared memory segment. This is usally because an
sqlr-listener is already running for the rovi instance.
If it is not running, something may have crashed and left an old segment
lying around. Use the ipcs command to inspect existing shared memory
segments and the ipcrm command to remove the shared memory segment with
id -1.
Error was: File exists
sqlr-connection error:
The pid file /usr/local/var/sqlrelay/tmp/pids/sqlr-listener-rovi was not found.
This usually means that the sqlr-listener
is not running.
The sqlr-listener must be running for the sqlr-connection to start.
FIX:
Step 1: get shared memory info:
root@devtest:~# ipcs
------ Shared Memory Segments --------
key shmid owner perms bytes nattch status
0x0101520c 385941504 ubuntu 640 679968 7
0x01015379 385974273 nobody 640 679968 3
------ Semaphore Arrays --------
key semid owner perms nsems
0x0101520c 9437184 ubuntu 600 13
0x01015379 9469953 nobody 600 13
------ Message Queues --------
key msqid owner perms used-bytes messages
Step 2: remove shared memory
ipcrm -m 385941504
sqlr-listener error:
Unable to create a shared memory segment. This is usally because an
sqlr-listener is already running for the rovi instance.
If it is not running, something may have crashed and left an old segment
lying around. Use the ipcs command to inspect existing shared memory
segments and the ipcrm command to remove the shared memory segment with
id -1.
Error was: File exists
sqlr-connection error:
The pid file /usr/local/var/sqlrelay/tmp/pids/sqlr-listener-rovi was not found.
This usually means that the sqlr-listener
is not running.
The sqlr-listener must be running for the sqlr-connection to start.
FIX:
Step 1: get shared memory info:
root@devtest:~# ipcs
------ Shared Memory Segments --------
key shmid owner perms bytes nattch status
0x0101520c 385941504 ubuntu 640 679968 7
0x01015379 385974273 nobody 640 679968 3
------ Semaphore Arrays --------
key semid owner perms nsems
0x0101520c 9437184 ubuntu 600 13
0x01015379 9469953 nobody 600 13
------ Message Queues --------
key msqid owner perms used-bytes messages
Step 2: remove shared memory
ipcrm -m 385941504
ipcrm -m 385974273
Step 3:
start sqlr:
sqlr-start -id abc
sqlr-start -id efg
Step 3:
start sqlr:
sqlr-start -id abc
sqlr-start -id efg
Friday, July 3, 2015
install ssdeep in ubuntu
sudo apt-get install build-essential libffi-dev python python-dev python-pip automake autoconf libtool
sudo BUILD_LIB=1 pip install ssdeep
root@SBGF:~/Downloads# ssdeep VirusShare_94e75b0bc1398e516e2f4d525cd9e2d1
ssdeep,1.1--blocksize:hash:hash,filename
393216:wfk5JDNLkgFEXwj38789F+GzKIVSVer6hiM4hjaf8IZa6:9r2nwjZ9FzKIkRYWPw6,"/home/chang/Downloads/VirusShare_94e75b0bc1398e516e2f4d525cd9e2d1"
root@SBGF:~/Downloads# ssdeep VirusShare_a985e7236a60d3b6027ee33124cfbd2c
ssdeep,1.1--blocksize:hash:hash,filename
49152:zUZWIDo67rsgVd7XWsPru4kMjc94tj8GskIt7qZi9qOu7aa:OV/sgVdbWsrkMw94tj8bkqqOu7,"/home/chang/Downloads/VirusShare_a985e7236a60d3b6027ee33124cfbd2c"
sudo BUILD_LIB=1 pip install ssdeep
root@SBGF:~/Downloads# ssdeep VirusShare_94e75b0bc1398e516e2f4d525cd9e2d1
ssdeep,1.1--blocksize:hash:hash,filename
393216:wfk5JDNLkgFEXwj38789F+GzKIVSVer6hiM4hjaf8IZa6:9r2nwjZ9FzKIkRYWPw6,"/home/chang/Downloads/VirusShare_94e75b0bc1398e516e2f4d525cd9e2d1"
root@SBGF:~/Downloads# ssdeep VirusShare_a985e7236a60d3b6027ee33124cfbd2c
ssdeep,1.1--blocksize:hash:hash,filename
49152:zUZWIDo67rsgVd7XWsPru4kMjc94tj8GskIt7qZi9qOu7aa:OV/sgVdbWsrkMw94tj8bkqqOu7,"/home/chang/Downloads/VirusShare_a985e7236a60d3b6027ee33124cfbd2c"
chang@SBGF:~/Downloads/9233160510$ ssdeep VirusShare_94e75b0bc1398e516e2f4d525cd9e2d1
ssdeep,1.1--blocksize:hash:hash,filename
393216:wfk5JDNLkgFEXwj38789F+GzKIVSVer6hiM4hjaf8IZa6:9r2nwjZ9FzKIkRYWPw6,"/home/chang/Downloads/9233160510/VirusShare_94e75b0bc1398e516e2f4d525cd9e2d1"
chang@SBGF:~/Downloads/9233160510$ ssdeep VirusShare_a985e7236a60d3b6027ee33124cfbd2c
ssdeep,1.1--blocksize:hash:hash,filename
49152:zUZWIDo67rsgVd7XWsPru4kMjc94tj8GskIt7qZi9qOu7aa:OV/sgVdbWsrkMw94tj8bkqqOu7,"/home/chang/Downloads/9233160510/VirusShare_a985e7236a60d3b6027ee33124cfbd2c"
chang@SBGF:~/Downloads/9233160510$ python
Python 2.7.11+ (default, Apr 17 2016, 14:00:29)
[GCC 5.3.1 20160413] on linux2
Type "help", "copyright", "credits" or "license" for more information.
>>> iport ssdeep
File "<stdin>", line 1
iport ssdeep
^
SyntaxError: invalid syntax
>>> import ssdeep
>>> ssdeep.compare('393216:wfk5JDNLkgFEXwj38789F+GzKIVSVer6hiM4hjaf8IZa6:9r2nwjZ9FzKIkRYWPw6', '49152:zUZWIDo67rsgVd7XWsPru4kMjc94tj8GskIt7qZi9qOu7aa:OV/sgVdbWsrkMw94tj8bkqqOu7')
0
Thursday, April 9, 2015
1: root@es-master1z:~# curl -XGET 'http://localhost:9200/gold*/_mapping?pretty'
2: curl -XPUT 'http://localhost:9200/smarttvsysinfo/'
3:
curl -XPUT 'http://localhost:9200/soci*/tweets/_mapping' -d '{"_ttl": {"enabled": true, "default": 5000}}'
3:
curl -XPUT 'http://localhost:9200/_template/template_1' -d '
{
"template" : "gold*",
"settings" : {
"analysis" : {
"filter" : {
"tweet_filter" : {
"type" : "word_delimiter",
"split_on_case_change":"false",
"split_on_numerics":"false",
"type_table": ["# => ALPHA", "@ => ALPHA", "_ => ALPHA"]
}
},
"analyzer" : {
"tweet_analyzer" : {
"type" : "custom",
"tokenizer" : "whitespace",
"filter" : ["lowercase", "tweet_filter"]
}
}
}
},
"mappings" : {
"_default_" : {
"_ttl" : { "enabled" : true, "default": 180002 } ,
"properties" : {
"text" : {
"type" : "string",
"analyzer" : "tweet_analyzer"
},
"retweeted_status" : {
"properties" : {
"text" : {
"type" : "string",
"analyzer" : "tweet_analyzer"
}
}
},
"user" : {
"properties" : {
"screen_name" : {
"type" : "string",
"analyzer" : "tweet_analyzer"
}
}
},
"in_reply_to_screen_name" : {
"type" : "string",
"analyzer" : "tweet_analyzer"
}
}
}
}
}
'
{
"template" : "gold*",
"settings" : {
"analysis" : {
"filter" : {
"tweet_filter" : {
"type" : "word_delimiter",
"split_on_case_change":"false",
"split_on_numerics":"false",
"type_table": ["# => ALPHA", "@ => ALPHA", "_ => ALPHA"]
}
},
"analyzer" : {
"tweet_analyzer" : {
"type" : "custom",
"tokenizer" : "whitespace",
"filter" : ["lowercase", "tweet_filter"]
}
}
}
},
"mappings" : {
"_default_" : {
"_ttl" : { "enabled" : true, "default": 180002 } ,
"properties" : {
"text" : {
"type" : "string",
"analyzer" : "tweet_analyzer"
},
"retweeted_status" : {
"properties" : {
"text" : {
"type" : "string",
"analyzer" : "tweet_analyzer"
}
}
},
"user" : {
"properties" : {
"screen_name" : {
"type" : "string",
"analyzer" : "tweet_analyzer"
}
}
},
"in_reply_to_screen_name" : {
"type" : "string",
"analyzer" : "tweet_analyzer"
}
}
}
}
}
'
Friday, March 27, 2015
elasticsearch create index and add documents
1, Create Index
curl -XPUT 'http://localhost:9200/twitter/'
curl -XPUT 'http://localhost:9200/twitter/'
2, add docs:
curl -XPUT 'http://localhost:9200/twitter/tweet/22' -d '{
"user" : "kimchy22",
"post_date" : "2015-03-27T12:12:12",
"message" : "trying out Elasticsearch"
}'
curl -XPUT 'http://localhost:9200/twitter/tweet/21' -d '{
"user" : "kimchy21",
"post_date" : "2015-03-27T11:12:12",
"message" : "trying out Elasticsearch"
}'
Thursday, March 26, 2015
curl search elasticsearch
curl -XPOST 'http://10.0.1.152:9200/logstash-*/_search?pretty' -d '
{
"query": {
"filtered": {
"query": {
"query_string": {
"query": "*listing*"
}
}
}
},
"fields": [
"code",
"time_taken",
"uri_path"
], "from": 0,
"size": 1,
"sort": {
"_score": {
"order": "asc"
}
},
"explain": true
}
'
{
"query": {
"filtered": {
"query": {
"query_string": {
"query": "*listing*"
}
}
}
},
"fields": [
"code",
"time_taken",
"uri_path"
], "from": 0,
"size": 1,
"sort": {
"_score": {
"order": "asc"
}
},
"explain": true
}
'
Monday, March 16, 2015
ubuntu 14 64 bits install ELK - March 2015.
1, install jdk
apt-get update
apt-get install openjdk-7-jdk
5, Add kibana into startup service
apt-get update
apt-get upgrade
2, install elasticsearch-1.4.4
wget https://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-1.4.4.deb
dpkg -i elasticsearch-1.4.4.deb
3, install logstash_1.4.2
3, install logstash_1.4.2
dpkg -i logstash_1.4.2-1-2c0f5a1_all.deb
Create conf file:
vim /etc/logstash/conf.d/test.conf
4, install kibana-4.0.1
wget https://download.elasticsearch.org/kibana/kibana/kibana-4.0.1-linux-x64.tar.gz
Create conf file:
vim /etc/logstash/conf.d/test.conf
# Logstash pipeline: tail Tomcat access logs, parse each line with grok,
# normalize the timestamp and numeric fields, then ship events to both
# stdout (debug) and a local Elasticsearch node.
input {
file {
type => "tomcat_access"
path => [ "/home/ubuntu/elk/samplelogs/*.log" ]
start_position => "beginning"  # read existing file contents, not just new lines
}
}
filter {
if [type] == "tomcat_access" {
grok{
patterns_dir => "./patterns"
# Split an access-log line into client, user, timestamp, method,
# uri_path, params, protocol, status code, bytes and time_taken.
match => [ "message", '%{IPORHOST:client} %{DATA} %{DATA:user} \[%{DATA:timestamp} %{ISO8601_TIMEZONE:timezone}\] \"%{WORD:method} %{URIPATH:uri_path}(%{URIPARAM:params}|) %{DATA:protocol}\" %{NUMBER:code} (%{NUMBER:bytes}|%{DATA}) %{NUMBER:time_taken}' ]
overwrite => [ "message" ]
}
date {
locale => "en"
match => [ "timestamp", "dd/MMM/yyyy:HH:mm:ss" ]
timezone => "UTC"  # assumes log timestamps are UTC -- TODO confirm
target => "logTimestamp"  # keep @timestamp intact; store the parsed time here
}
mutate {
# Convert to real integers so Kibana/ES can aggregate on them.
convert => [ "code", "integer" ]
convert => [ "time_taken", "integer" ]
}
}
}
output {
stdout {
codec => rubydebug  # debug output; remove for production
}
elasticsearch {
host => "localhost"
protocol => "http"
port => "9200"
node_name => "elk-test"
}
}
4, install kibana-4.0.1
wget https://download.elasticsearch.org/kibana/kibana/kibana-4.0.1-linux-x64.tar.gz
tar zxvf kibana-4.0.1-linux-x64.tar.gz
mv kibana-4.0.1-linux-x64 /opt/
5, Add kibana into startup service
6, run services:
/etc/init.d/elasticsearch start
/etc/init.d/logstash start
/etc/init.d/kibana4 start
7, access kibana
http://x.x.x.x:5601/
If you cannot open this page, try to open the port 5601 from your firewall.
Friday, March 6, 2015
delete elasticsearch logstash index automatically
1, Wrote a shell script to delete two days old index.
dave@cm-log-manager:~/tools$ cat delete_ES_index.sh
#!/bin/bash
# Delete old daily logstash-* indices from the local Elasticsearch node,
# keeping yesterday's (and today's) index.
#
# Output: prints each computed index date, followed by the Elasticsearch
# response for every DELETE it issues (same as the original script).
# Failed deletes (e.g. index already gone) are not fatal: Elasticsearch
# answers 404 and curl still exits 0, so the loop keeps going.

# Yesterday's index is kept; its date is printed for reference only.
yesterday=$(/bin/date --date="-1 days" +%Y.%m.%d)
echo "$yesterday"

# Delete the indices that are 2..5 days old.
for age in 2 3 4 5; do
  day=$(/bin/date --date="-${age} days" +%Y.%m.%d)
  echo "$day"
  curl -XDELETE "http://localhost:9200/logstash-$day"
done
dave@cm-log-manager:~/tools$ cat delete_ES_index.sh
# Duplicate blog listing of delete_ES_index.sh (shebang omitted in this copy).
# Computes dates 1-5 days back and DELETEs the logstash index for days 2-5.
yesterday=`/bin/date --date="-1 days" +%Y.%m.%d`  # kept, printed for reference
echo $yesterday
twodayago=`/bin/date --date="-2 days" +%Y.%m.%d`
echo $twodayago
curl -XDELETE "http://localhost:9200/logstash-$twodayago"
threedayago=`/bin/date --date="-3 days" +%Y.%m.%d`
echo $threedayago
curl -XDELETE "http://localhost:9200/logstash-$threedayago"
fourdayago=`/bin/date --date="-4 days" +%Y.%m.%d`
echo $fourdayago
curl -XDELETE "http://localhost:9200/logstash-$fourdayago"
fivedayago=`/bin/date --date="-5 days" +%Y.%m.%d`
echo $fivedayago
curl -XDELETE "http://localhost:9200/logstash-$fivedayago"
2, Add this script to cronjob
1 23 * * * /home/dave/tools/delete_ES_index.sh
3, now only two days index left
Thursday, March 5, 2015
install jsunpack-n on ubuntu 32/64 bits
0:
apt-get update
apt-get upgrade
apt-get install build-essential make patch gettext gcc g++ automake autoconf libtool flex bison libglib2.0-dev libnet1-dev
apt-get install linux-headers-$(uname -r) build-essential make patch gettext gcc g++ automake autoconf libtool flex bison libglib2.0-dev libnet-dev
1, install js
apt-get install libmozjs*
2, install pynids
apt-get update
apt-get upgrade
apt-get install build-essential make patch gettext gcc g++ automake autoconf libtool flex bison libglib2.0-dev libnet1-dev
apt-get install libmozjs*
ln -s /usr/bin/js24 /usr/bin/js
2, install pynids
apt-get install libpcap-dev pkg-config python-dev libgtk2.0-dev libnet1-dev libnids1.21 libnids-dev
sudo ldconfig
wget https://jon.oberheide.org/pynids/downloads/pynids-0.6.1.tar.gz --no-check-certificate
tar zxvf pynids-0.6.1.tar.gz
cd pynids-0.6.1/
python setup.py build
python setup.py install
3, install yara
apt-get install yara
apt-get install python-yara python-magic
4 Build and install BeautifulSoup from ./depends/BeautifulSoup-3.2.0.tar.gz
(alternatively from the pypi http://pypi.python.org/pypi/BeautifulSoup/3.2.0)
$ cd depends
$ tar xvfz BeautifulSoup-3.2.0.tar.gz
$ cd BeautifulSoup-3.2.0/
$ python setup.py build
$ sudo python setup.py install
5 Install pycrypto (for encrypted PDFs) from ./depends/pycrypto-2.4.1.tar.gz
(alternatively from the pypi http://pypi.python.org/pypi/pycrypto/2.4.1)
$ cd depends
$ tar xvfz pycrypto-2.4.1.tar.gz
$ cd pycrypto-2.4.1
$ python setup.py build
$ sudo python setup.py install
2,
echo "/usr/local/lib" >> /etc/ld.so.conf
ldconfig
Monday, February 23, 2015
update kibana replicas to 0
This kibana 4
Update public/index.js file
line 46098
root@cm-log-manager:/opt/kibana/src# vim ./public/index.js
```
define('components/setup/steps/create_kibana_index',['require','components/setup/_setup_error'],function (require) {
return function CreateKibanaIndexFn(Private, es, configFile, Notifier) {
return function createKibanaIndex() {
var notify = new Notifier({ location: 'Setup: Kibana Index Creation' });
var complete = notify.lifecycle('kibana index creation');
var SetupError = Private(require('components/setup/_setup_error'));
return es.indices.create({
index: configFile.kibana_index,
body: {
settings: {
number_of_shards : 1,
number_of_replicas: 0
}
}
})
.catch(function (err) {
throw new SetupError('Unable to create Kibana index "<%= configFile.kibana_index %>"', err);
})
.then(function () {
return es.cluster.health({
waitForStatus: 'yellow',
index: configFile.kibana_index
})
.catch(function (err) {
throw new SetupError('Waiting for Kibana index "<%= configFile.kibana_index %>" to come online failed', err);
});
})
.then(complete, complete.failure);
};
};
});
Update public/index.js file
line 46098
root@cm-log-manager:/opt/kibana/src# vim ./public/index.js
define('components/setup/steps/create_kibana_index',['require','components/setup/_setup_error'],function (require) {
return function CreateKibanaIndexFn(Private, es, configFile, Notifier) {
return function createKibanaIndex() {
var notify = new Notifier({ location: 'Setup: Kibana Index Creation' });
var complete = notify.lifecycle('kibana index creation');
var SetupError = Private(require('components/setup/_setup_error'));
return es.indices.create({
index: configFile.kibana_index,
body: {
settings: {
number_of_shards : 1,
number_of_replicas: 0
}
}
})
.catch(function (err) {
throw new SetupError('Unable to create Kibana index "<%= configFile.kibana_index %>"', err);
})
.then(function () {
return es.cluster.health({
waitForStatus: 'yellow',
index: configFile.kibana_index
})
.catch(function (err) {
throw new SetupError('Waiting for Kibana index "<%= configFile.kibana_index %>" to come online failed', err);
});
})
.then(complete, complete.failure);
};
};
});
```
Monday, January 26, 2015
install bd on ubuntu 64
1, download
http://download.bitdefender.com/SMB/Workstation_Security_and_Management/BitDefender_Antivirus_Scanner_for_Unices/Unix/Current/EN_FR_BR_RO/Linux/
wget http://enterprise.bitdefender.com/site/Downloads/dwf/625/
2, install
mv index.html BitDefender-Antivirus-Scanner-7.6-4.linux-gcc4x.amd64.deb.run
chmod +x BitDefender-Antivirus-Scanner-7.6-4.linux-gcc4x.amd64.deb.run
./BitDefender-Antivirus-Scanner-7.6-4.linux-gcc4x.amd64.deb.run
3, update sig
bdscan --update
http://download.bitdefender.com/SMB/Workstation_Security_and_Management/BitDefender_Antivirus_Scanner_for_Unices/Unix/Current/EN_FR_BR_RO/Linux/
wget http://enterprise.bitdefender.com/site/Downloads/dwf/625/
2, install
mv index.html BitDefender-Antivirus-Scanner-7.6-4.linux-gcc4x.amd64.deb.run
chmod +x BitDefender-Antivirus-Scanner-7.6-4.linux-gcc4x.amd64.deb.run
./BitDefender-Antivirus-Scanner-7.6-4.linux-gcc4x.amd64.deb.run
3, update sig
bdscan --update
Install F-Prot in ubuntu 64
dpkg --add-architecture i386
apt-get update
apt-get install libc6:i386 libncurses5:i386 libstdc++6:i386
1 dowload
http://files.f-prot.com/files/unix-trial/fp-Linux.x86.32-ws.tar.gz
2, unzip and mv it to /opt
3, cd /opt/f-prot/
./install-f-prot.pl
apt-get update
apt-get install libc6:i386 libncurses5:i386 libstdc++6:i386
http://files.f-prot.com/files/unix-trial/fp-Linux.x86.32-ws.tar.gz
2, unzip and mv it to /opt
3, cd /opt/f-prot/
./install-f-prot.pl
install AVG to ubuntu 64 bits
There are two ways to do it:
A:
1, Download AVG
wget http://download.avgfree.com/filedir/inst/avg2013flx-r3118-a6926.i386.deb
sudo apt-get update
sudo apt-get install libc6:i386 libncurses5:i386 libstdc++6:i386
sudo dpkg -i avg2013flx-r3118-a6926.i386.deb
B:
1, Download AVG
wget http://download.avgfree.com/filedir/inst/avg2013flx-r3118-a6926.i386.deb
2, Install 32 lib
apt-get install libc6:i386
cd /etc/apt/sources.list.d
echo "deb http://old-releases.ubuntu.com/ubuntu/ raring main restricted universe multiverse" >ia32-libs-raring.list
apt-get update
apt-get install ia32-libs
rm /etc/apt/sources.list.d/ia32-libs-raring.list
apt-get update
apt-get install gcc-multilib
3, Install AVG
dpkg -i avg2013flx-r3118-a6926.i386.deb
/opt/avg/av/bin/avgsetup
avgupdate
A:
1, Download AVG
wget http://download.avgfree.com/filedir/inst/avg2013flx-r3118-a6926.i386.deb
2, Install 32 lib
sudo dpkg --add-architecture i386
sudo apt-get update
sudo apt-get install libc6:i386 libncurses5:i386 libstdc++6:i386
sudo dpkg -i avg2013flx-r3118-a6926.i386.deb
1, Download AVG
wget http://download.avgfree.com/filedir/inst/avg2013flx-r3118-a6926.i386.deb
2, Install 32 lib
apt-get install libc6:i386
cd /etc/apt/sources.list.d
echo "deb http://old-releases.ubuntu.com/ubuntu/ raring main restricted universe multiverse" >ia32-libs-raring.list
apt-get update
apt-get install ia32-libs
rm /etc/apt/sources.list.d/ia32-libs-raring.list
apt-get update
apt-get install gcc-multilib
3, Install AVG
dpkg -i avg2013flx-r3118-a6926.i386.deb
/opt/avg/av/bin/avgsetup
avgupdate
Tuesday, January 6, 2015
EW:~/code/nutty/chef$ export LD_LIBRARY_PATH=/usr/lib/oracle/12.1/client64/lib
EW:~/code/nutty/chef$
EW:~/code/nutty/chef$
EW:~/code/nutty/chef$
EW:~/code/nutty/chef$ /usr/bin/sqlplus64
SQL*Plus: Release 12.1.0.2.0 Production on Tue Jan 6 15:48:14 2015
Copyright (c) 1982, 2014, Oracle. All rights reserved.
Enter user-name: ^C
chang@ACPNEW:~/code/nutty/chef$ /usr/bin/sqlplus64 meta94@//dev-db.cwxbj.us-west-2.rds.amcom:1521/META
SQL*Plus: Release 12.1.0.2.0 Production on Tue Jan 6 15:48:44 2015
Copyright (c) 1982, 2014, Oracle. All rights reserved.
Connected to:
Oracle Database 11g Release 11.2.0.2.0 - 64bit Production
EW:~/code/nutty/chef$
EW:~/code/nutty/chef$
EW:~/code/nutty/chef$
EW:~/code/nutty/chef$ /usr/bin/sqlplus64
SQL*Plus: Release 12.1.0.2.0 Production on Tue Jan 6 15:48:14 2015
Copyright (c) 1982, 2014, Oracle. All rights reserved.
Enter user-name: ^C
chang@ACPNEW:~/code/nutty/chef$ /usr/bin/sqlplus64 meta94@//dev-db.cwxbj.us-west-2.rds.amcom:1521/META
SQL*Plus: Release 12.1.0.2.0 Production on Tue Jan 6 15:48:44 2015
Copyright (c) 1982, 2014, Oracle. All rights reserved.
Connected to:
Oracle Database 11g Release 11.2.0.2.0 - 64bit Production
Subscribe to:
Posts (Atom)
-
Step 1, New a project rails new demo Step 2, Update Gemfile add paperclip, mysql2 gem, enable JavaScript runtime gem 'mysql2' ...
-
I used 7z to zip this file under Windows, try to unzip it under linux [ang@walker temp]$ gunzip 2011.sdf.zip gunzip: 2011.sdf.zip: unkno...
-
When trying to access transmission from web-browswer i got the message : 403: Forbidden Unauthorized IP Address. Either disable the IP ad...