Restore Log #Core

# Stop lifecycle policy
# Stop ILM cluster-wide before restoring, so the restored index is not
# immediately acted on by an existing policy (ILM is restarted at the end
# of this runbook and the policy is removed from the restored index first).
POST /_ilm/stop


# Restore Index
## Get snapshot and index name first
# Wildcard lookup: lists snapshots in repository "core_repo_s3_6.9" whose
# name starts with "daily-snap-2020.11.16". Copy the full snapshot id
# (including the random suffix) for the restore call below.
GET /_snapshot/core_repo_s3_6.9/daily-snap-2020.11.16*

## Restore index
# Restore a single daily logstash index from the snapshot found above.
# FIX: include_global_state changed from true to false — restoring global
# cluster state (index templates, persistent settings) is not wanted when
# restoring one index, and every other restore request in this file uses
# false as well.
POST /_snapshot/core_repo_s3_6.9/daily-snap-2020.11.16-wmlfvvhlt4yesmfn9npy_g/_restore
{
"indices": "logstash-2020.11.15",
"ignore_unavailable": true,
"include_global_state": false
}


# Remove lifecycle policy from restored index
# Detach any ILM policy from the restored index so it is not rolled over
# or deleted once ILM is started again below.
POST logstash-2020.11.15/_ilm/remove


# start lifecycle policy
# Resume ILM for the rest of the cluster now that the restore is done.
POST /_ilm/start














# ==================== OLD commands (kept for reference) ====================
# list  snapshot in s3 repository
# _cat output sorted by snapshot id, newest first (s=id:desc).
GET /_cat/snapshots/s3_repository?s=id:desc


# restore snapshot after 2019.02.01
# Snapshots after 2019.02.01 use the "curator-YYYY.MM.DD" naming scheme
# (presumably created by curator — see the differently named "before"
# section below); restore only the single filebeat index from it.

POST /_snapshot/s3_repository/curator-2019.03.07/_restore?pretty
{
  "indices": "filebeat-2019.03.06",
  "include_global_state": false
}


# restore snapshot before 2019.02.01
# Older snapshots are named after the index itself ("filebeat-YYYY.MM.DD").

POST /_snapshot/s3_repository/filebeat-2018.12.21/_restore?pretty
{
  "indices": "filebeat-2018.12.21",
  "include_global_state": false
}


# Verify a restored index exists (returns settings/mappings/aliases).
GET filebeat-2018.11.15


# Wildcard check across all 2018 filebeat indices.
GET filebeat-2018.*





# Register (or update) the S3 snapshot repository; verify=true makes the
# nodes check they can actually read/write the bucket.
# NOTE(security): inline access_key/secret_key in repository settings is
# insecure and deprecated — prefer the elasticsearch keystore
# (s3.client.*.access_key / s3.client.*.secret_key). Values here are redacted.
POST /_snapshot/s3_repository?verify=true&pretty 
{
  "type": "s3",
  "settings": {
    "bucket": "elk-logging-snapshot",
    "region": "us-east-1",
    "access_key": "XXXXXXX",
    "secret_key": "XXXXX",
    "server_side_encryption" : "true",
    "compress":"true"
  }
}





# List all snapshots in the repository.
GET /_cat/snapshots/s3_repository?pretty


# Take a snapshot of a single index, blocking until it completes.
POST /_snapshot/s3_repository/filebeat-2019.01.03?wait_for_completion=true
{
   "indices": "filebeat-2019.01.03",  
   "include_global_state": false
}


# NOTE(review): no "indices" field here, so this snapshots ALL indices in
# the cluster — confirm that is intended for this per-index backup scheme.
POST /_snapshot/s3_repository/filebeat-2018.10.12?wait_for_completion=true
{
   "ignore_unavailable": true,
   "include_global_state": false
}


# Asynchronous snapshot (no wait_for_completion): request returns
# immediately; poll the snapshot status separately.
POST /_snapshot/s3_repository/filebeat-2018.05.03
{
  "indices": "filebeat-2018.05.03",  
  "ignore_unavailable": true,
  "include_global_state": false
}


# All snapshot columns (h=*) as JSON, newest first.
GET /_cat/snapshots/s3_repository?h=*&format=json&s=id:desc


GET /_cat/snapshots/s3_repository?pretty


# NOTE(review): "found-snapshots" is typically the Elastic Cloud managed
# repository name — confirm which cluster this targets.
GET /_cat/snapshots/found-snapshots?h=*&s=id:desc


GET _cat/indices


# Delete an old snapshot from the repository.
DELETE /_snapshot/s3_repository/filebeat-2018.01.20


GET /_cat/snapshots/found-snapshots


# List registered snapshot repositories.
GET /_cat/repositories?pretty


# Drop the staging index.
DELETE sales_order_staging


# Indices sorted by name, descending.
GET /_cat/indices?s=index:desc




GET Fil



GET /_cat/snapshots/s3_repository?s=id:desc


# Search the restored index for messages consumed from a specific queue.
# from/size returns the first 1000 matching documents.
# NOTE(review): term filters match exact values — assumes "queue" and
# "action" are keyword (non-analyzed) fields; confirm against the mapping.
GET /filebeat-2019.01.09/_search
{
  "from" : 0, "size" : 1000,
   "query": {
    "bool": {
      "filter": [
        { "term": { "queue": "HPT.SYNC.Orderhive.WEBHOOK"}},
        { "term": { "action": "messageConsumed"}}
      ]
    }
  }
}




GET /filebeat-2018.03.0


# List all index templates.
GET _template/


# Shard allocation with column headers (v).
GET /_cat/shards?v


# Cluster node information.
GET _nodes


# Inspect a single index (settings/mappings/aliases).
GET /filebeat-2018.04.22


# Delete an old index.
DELETE /filebeat-2018.02.02


# Restore one daily filebeat index from its snapshot.
POST /_snapshot/s3_repository/filebeat-2018.08.10/_restore?pretty
{
 "indices": "filebeat-2018.08.10",
 "ignore_unavailable": true,
 "include_global_state": false
}


GET _cat/shards





# Search across all access-log indices.
# NOTE(review): the bool query has no clauses, so this matches ALL
# documents (first 1000 by from/size) — add filter/must clauses as needed.
GET elk_access_log_*/_search
{
  "from" : 0, "size" : 1000,
  "query": {
    "bool": {
      
    }
  }
}



# Register/update the S3 snapshot repository (same request as earlier in
# this file) and verify bucket access.
# NOTE(security): inline credentials are insecure and deprecated — prefer
# the elasticsearch keystore (s3.client.*). Values here are redacted.
POST /_snapshot/s3_repository?verify=true&pretty 
{
  "type": "s3",
  "settings": {
    "bucket": "elk-logging-snapshot",
    "region": "us-east-1",
    "access_key": "xxxxxxx",
    "secret_key": "xxxxxx",
    "server_side_encryption" : "true",
    "compress":"true"
  }
}


# Default search: match_all, first 10 hits.
GET /elk_access_log_20180316/_search




# Ingest pipeline that parses Apache2 access logs: grok the raw message,
# drop it, move the read time to read_timestamp, parse the access time
# into @timestamp, then enrich with user_agent and geoip.
# FIX: date format changed from "dd/MMM/YYYY:H:m:s Z" to
# "dd/MMM/yyyy:H:m:s Z" — under java.time (Elasticsearch 7+) "YYYY" is the
# week-based year and mis-parses dates around the new year; "yyyy" is the
# calendar year and behaves identically under Joda (6.x) as well.
PUT /_ingest/pipeline/apache-access-log
{
  "description": "Pipeline for parsing Apache2 access logs. Requires the geoip and user_agent plugins.",
  "processors": [{
    "grok": {
      "field": "message",
      "patterns":[
        "%{IPORHOST:apache2.access.remote_ip} - %{DATA:apache2.access.user_name} \\[%{HTTPDATE:apache2.access.time}\\] \"%{WORD:apache2.access.method} %{DATA:apache2.access.url} HTTP/%{NUMBER:apache2.access.http_version}\" %{NUMBER:apache2.access.response_code} (?:%{NUMBER:apache2.access.body_sent.bytes}|-)( \"%{DATA:apache2.access.referrer}\")?( \"%{DATA:apache2.access.agent}\")?",
        "%{IPORHOST:apache2.access.remote_ip} - %{DATA:apache2.access.user_name} \\[%{HTTPDATE:apache2.access.time}\\] \"-\" %{NUMBER:apache2.access.response_code} -"
        ],
      "ignore_missing": true
    }
  },{
    "remove":{
      "field": "message"
    }
  }, {
    "rename": {
      "field": "@timestamp",
      "target_field": "read_timestamp"
    }
  }, {
    "date": {
      "field": "apache2.access.time",
      "target_field": "@timestamp",
      "formats": ["dd/MMM/yyyy:H:m:s Z"]
    }
  }, {
    "remove": {
      "field": "apache2.access.time"
    }
  }, {
    "user_agent": {
      "field": "apache2.access.agent",
      "target_field": "apache2.access.user_agent",
      "ignore_failure": true
    }
  }, {
    "remove": {
      "field": "apache2.access.agent",
      "ignore_failure": true
    }
  }, {
    "geoip": {
      "field": "apache2.access.remote_ip",
      "target_field": "apache2.access.geoip"
    }
  }],
  "on_failure" : [{
    "set" : {
      "field" : "error.message",
      "value" : "{{ _ingest.on_failure_message }}"
    }
  }]
}


    • Related Articles

    • Restore Log #plus

      GET .monitoring-kibana-6-2019*/_search/ {  "query": {    "match_all": {}  } } GET /filebeat-6.2.4-2019.* POST /filebeat-6.2.4-2018.11.0*/_close DELETE  /filebeat-6.2.4-2018.11.09 POST /_snapshot/s3_repository/curator-2019.02.24/_restore?pretty { ...
    • Steps to install Magento 2.2.6 in Kubernetes

      Step 1 : Install Magento and Create Ingress helm install --name magento-name --namespace magento-name -f /root/magento_helm/value.yaml stable/magento --version 3.3.0 Step 2 : Setup Crontab Make sure crontab is running. service cron start  Commands : ...
    • What & How : CI/CD Tools 2 (Kibana)

      Now you can see logs in Kibana (Elasticsearch); your log files are transferred here, so you can easily look at errors without logging in via SSH/PuTTY. Here's the link: oxkibana.orderhive.plus How to use Kibana: 1. Go to the link: This is main ...
    • What & How: Add Sudo User in any Ubuntu System.

      1. Create a new user adduser username 2. Add it to Sudoers Group so it can have the privilege of sudo access. usermod -aG sudo username 3. Now If you want to enable password-based login for this user and you are getting an error like "Permission ...
    • Viewing App Logs in Elastic Kibana Dashboard

      - Login to Kibana Dashboard at https://elastic.openxcell.dev - Choose Openxcell Development Space: - Add Container name filter to filter your application logs - Filter Message Field only: - Select Time Range: