Thursday, April 9, 2015

1:  root@es-master1z:~# curl -XGET 'http://localhost:9200/gold*/_mapping?pretty'

curl -XPUT 'http://localhost:9200/soci*/tweets/_mapping' -d '{"_ttl": {"enabled": true, "default": 5000}}'


2: curl -XPUT 'http://localhost:9200/smarttvsysinfo/'

3:




curl -XPUT 'http://localhost:9200/_template/template_1' -d '
{
    "template" : "gold*",
    "settings" : {
        "analysis" : {
            "filter" : {
                "tweet_filter" : {
                    "type" : "word_delimiter",
                    "split_on_case_change":"false",
                    "split_on_numerics":"false",
                    "type_table": ["# => ALPHA", "@ => ALPHA", "_ => ALPHA"]
                }
            },
            "analyzer" : {
                "tweet_analyzer" : {
                    "type" : "custom",
                    "tokenizer" : "whitespace",
                    "filter" : ["lowercase", "tweet_filter"]
                }
            }
        }
    },
   "mappings" : {
        "_default_" : {
            "_ttl" : { "enabled" : true, "default": 180002 } ,
            "properties" : {
                "text" : {
                    "type" : "string",
                    "analyzer" : "tweet_analyzer"
                },
                "retweeted_status" : {
                    "properties" : {
                        "text" : {
                            "type" : "string",
                            "analyzer" : "tweet_analyzer"
                        }
                    }
                },
                "user" : {
                    "properties" : {
                        "screen_name" : {
                            "type" : "string",
                            "analyzer" : "tweet_analyzer"
                        }
                    }
                },
                "in_reply_to_screen_name" : {
                    "type" : "string",
                    "analyzer" : "tweet_analyzer"
                }
            }
        }

    }
}
'

Friday, March 27, 2015

Elasticsearch: create an index and add documents

1, Create Index
curl -XPUT 'http://localhost:9200/twitter/'

2, add docs:
 curl -XPUT 'http://localhost:9200/twitter/tweet/22' -d '{
    "user" : "kimchy22",
    "post_date" : "2015-03-27T12:12:12",
    "message" : "trying out Elasticsearch"
}'
  
 curl -XPUT 'http://localhost:9200/twitter/tweet/21' -d '{
    "user" : "kimchy21",
    "post_date" : "2015-03-27T11:12:12",
    "message" : "trying out Elasticsearch"
}'

Thursday, March 26, 2015

curl search elasticsearch

curl -XPOST 'http://10.0.1.152:9200/logstash-*/_search?pretty' -d '
{
  "query": {
    "filtered": {
      "query": {
        "query_string": {
          "query": "*listing*"
        }
      }
    }
  },
 "fields": [
    "code",
    "time_taken",
    "uri_path"
  ],  "from": 0,
  "size": 1,
  "sort": {
    "_score": {
      "order": "asc"
    }
  },
  "explain": true
}
'

Monday, March 16, 2015

Ubuntu 14.04 64-bit: install the ELK stack (Elasticsearch, Logstash, Kibana) — March 2015.

1, install jdk
apt-get update
apt-get upgrade
apt-get install openjdk-7-jdk

2, install elasticsearch-1.4.4 
wget https://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-1.4.4.deb
dpkg -i elasticsearch-1.4.4.deb

3, install logstash_1.4.2
dpkg -i logstash_1.4.2-1-2c0f5a1_all.deb

Create conf file:

vim /etc/logstash/conf.d/test.conf

# Logstash pipeline: tail Tomcat access logs, parse them, and ship the
# structured events to both stdout (debug) and Elasticsearch.

input {
  file {
    # Tag events so the filter below only runs on this input.
    type => "tomcat_access"
    path => [ "/home/ubuntu/elk/samplelogs/*.log" ]
    # Read existing files from the top, not just new lines appended
    # after Logstash starts (useful when replaying sample logs).
    start_position => "beginning"
  }
}
filter {
 if [type] == "tomcat_access" {
   # Split the raw access-log line into named fields: client IP, user,
   # timestamp + timezone, HTTP method, URI path, optional query params,
   # protocol, status code, bytes (or "-"), and the request time taken.
   grok{
     # Directory holding any custom grok pattern files referenced below.
     patterns_dir => "./patterns"
     match => [ "message", '%{IPORHOST:client} %{DATA} %{DATA:user} \[%{DATA:timestamp} %{ISO8601_TIMEZONE:timezone}\] \"%{WORD:method} %{URIPATH:uri_path}(%{URIPARAM:params}|) %{DATA:protocol}\" %{NUMBER:code} (%{NUMBER:bytes}|%{DATA}) %{NUMBER:time_taken}' ]
     # Replace the original raw line with the parsed remainder.
     overwrite => [ "message" ]
   }
   # Parse the extracted "timestamp" field (e.g. 27/Mar/2015:12:12:12)
   # into the "logTimestamp" field; source is treated as UTC.
   # NOTE(review): the grok-captured "timezone" field is not used here —
   # confirm the logs really are UTC.
   date {
     locale => "en"
     match => [ "timestamp", "dd/MMM/yyyy:HH:mm:ss" ]
     timezone => "UTC"
     target => "logTimestamp"
   }
   # Store numeric fields as integers so Elasticsearch/Kibana can
   # aggregate and sort on them numerically.
   mutate {
    convert => [ "code", "integer" ]
    convert => [ "time_taken", "integer" ]
  }
 }
}
output {
  # Pretty-print every event to stdout for debugging the pipeline.
  stdout {
    codec => rubydebug
  }
  # Ship events to the local Elasticsearch node over HTTP.
  elasticsearch {
    host => "localhost"
    protocol => "http"
    port => "9200"
    node_name => "elk-test"
  }
}



4, install  kibana-4.0.1
wget https://download.elasticsearch.org/kibana/kibana/kibana-4.0.1-linux-x64.tar.gz
tar zxvf kibana-4.0.1-linux-x64.tar.gz
mv kibana-4.0.1-linux-x64 /opt/

5, Add kibana into startup service
6, run services:
/etc/init.d/elasticsearch start
/etc/init.d/logstash start
/etc/init.d/kibana4 start

7, access kibana
http://x.x.x.x:5601/
If you cannot open this page, make sure port 5601 is open in your firewall.



Friday, March 6, 2015

delete elasticsearch logstash index automatically

1, Wrote a shell script that deletes indices two or more days old (keeping today's and yesterday's).

dave@cm-log-manager:~/tools$ cat delete_ES_index.sh

#!/bin/bash
# Delete old Elasticsearch logstash-YYYY.MM.DD daily indices so that only
# the most recent two days (today and yesterday) are kept.
# Intended to run daily from cron, e.g.:
#   1 23 * * * /home/dave/tools/delete_ES_index.sh
#
# Requires GNU date (for --date) and curl.  The Elasticsearch base URL
# can be overridden via the ES_URL environment variable.

ES_URL="${ES_URL:-http://localhost:9200}"

# days_ago N — print the date N days in the past as YYYY.MM.DD,
# the suffix format used by logstash daily indices.
days_ago() {
  /bin/date --date="-$1 days" +%Y.%m.%d
}

# Yesterday is only reported, never deleted (it is kept along with today).
echo "$(days_ago 1)"

# Remove the indices for 2..5 days ago.  A failed delete (index already
# gone, Elasticsearch unreachable) is reported to stderr but does not
# abort the remaining deletions.
for n in 2 3 4 5; do
  day=$(days_ago "$n")
  echo "$day"
  curl -XDELETE "${ES_URL}/logstash-${day}" \
    || printf 'warn: failed to delete logstash-%s\n' "$day" >&2
done

2, Add this script to cronjob
1 23 * * * /home/dave/tools/delete_ES_index.sh

3, now only the most recent two days of indices are left


Thursday, March 5, 2015

Install jsunpack-n on Ubuntu (32/64-bit)

0:
apt-get update
apt-get upgrade
apt-get install  build-essential make patch gettext gcc g++ automake autoconf libtool  flex bison libglib2.0-dev libnet1-dev

apt-get install linux-headers-$(uname -r) build-essential make patch gettext gcc g++ automake autoconf libtool  flex bison libglib2.0-dev libnet-dev

1, install js
apt-get  install libmozjs*
ln -s /usr/bin/js24 /usr/bin/js

2, install pynids
apt-get install libpcap-dev pkg-config python-dev libgtk2.0-dev libnet1-dev libnids1.21 libnids-dev
$ sudo ldconfig
wget https://jon.oberheide.org/pynids/downloads/pynids-0.6.1.tar.gz --no-check-certificate

tar zxvf pynids-0.6.1.tar.gz
cd pynids-0.6.1/
python setup.py build
python setup.py install

3, install yara
apt-get install yara
apt-get install python-yara python-magic

4  Build and install BeautifulSoup from ./depends/BeautifulSoup-3.2.0.tar.gz
    (alternatively from the pypi http://pypi.python.org/pypi/BeautifulSoup/3.2.0)

    $ cd depends
    $ tar xvfz BeautifulSoup-3.2.0.tar.gz
    $ cd BeautifulSoup-3.2.0/
    $ python setup.py build
    $ sudo python setup.py install

5 Install pycrypto (for encrypted PDFs) from ./depends/pycrypto-2.4.1.tar.gz
    (alternatively from the pypi http://pypi.python.org/pypi/pycrypto/2.4.1)

    $ cd depends
    $ tar xvfz pycrypto-2.4.1.tar.gz
    $ cd pycrypto-2.4.1
    $ python setup.py build
    $ sudo python setup.py install


2, 
echo "/usr/local/lib" >> /etc/ld.so.conf
ldconfig

Monday, February 23, 2015

update kibana replicas to 0

This kibana 4
Update public/index.js file
line 46098

root@cm-log-manager:/opt/kibana/src# vim ./public/index.js

```
// Kibana 4 setup step that creates the .kibana config index on first run.
// NOTE(review): this is patched vendor code from /opt/kibana/src/public/index.js
// (around line 46098); the customization is the explicit index settings —
// 1 shard / 0 replicas — so a single-node cluster does not stay yellow.
// Confirm against the stock 4.0.1 bundle before upgrading Kibana.
define('components/setup/steps/create_kibana_index',['require','components/setup/_setup_error'],function (require) {
  return function CreateKibanaIndexFn(Private, es, configFile, Notifier) {
    return function createKibanaIndex() {
      var notify = new Notifier({ location: 'Setup: Kibana Index Creation' });
      var complete = notify.lifecycle('kibana index creation');
      var SetupError = Private(require('components/setup/_setup_error'));

      return es.indices.create({
        index: configFile.kibana_index,
        body: {
          settings: {
            // Customization: no replicas, so cluster health can reach
            // green/yellow on a single-node Elasticsearch.
            number_of_shards : 1,
            number_of_replicas: 0
          }
        }
      })
      // Creation failure is wrapped in a SetupError with the index name.
      .catch(function (err) {
        throw new SetupError('Unable to create Kibana index "<%= configFile.kibana_index %>"', err);
      })
      // After creation, block until the index reports at least yellow health.
      .then(function () {
        return es.cluster.health({
          waitForStatus: 'yellow',
          index: configFile.kibana_index
        })
        .catch(function (err) {
          throw new SetupError('Waiting for Kibana index "<%= configFile.kibana_index %>" to come online failed', err);
        });
      })
      // Resolve or reject the notifier lifecycle depending on the outcome.
      .then(complete, complete.failure);
    };
  };
});
```