multiple inputs on logstash jdbc

故里飘歌 2020-12-04 20:12

I am using the Logstash JDBC input to keep MySQL and Elasticsearch in sync. It's working fine for one table, but now I want to do it for multiple tables. Do I need to run a separate Logstash configuration for each table, or can this be done in a single one?

3 Answers
  •  鱼传尺愫
    2020-12-04 20:58

    This will not create duplicate data, and it is compatible with Logstash 6.x.

    # YOUR_DATABASE_NAME : test
    # FIRST_TABLE :  place  
    # SECOND_TABLE :  things    
    # SET_DATA_INDEX : test_index_1_12, test_index_2_13
    
    input {
        jdbc {
            # The path to our downloaded jdbc driver
            jdbc_driver_library => "/mysql-connector-java-5.1.44-bin.jar"
            jdbc_driver_class => "com.mysql.jdbc.Driver"
            # MySQL JDBC connection string to our database, YOUR_DATABASE_NAME
            jdbc_connection_string => "jdbc:mysql://localhost:3306/test"
            # The user we wish to execute our statement as
            jdbc_user => "root"
            jdbc_password => ""
            schedule => "* * * * *"
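            # Number every row with a MySQL user variable; the generated aut_es_1
            # column is used as the tracking column and in the document id below.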
            statement => "SELECT  @slno:=@slno+1 aut_es_1, es_qry_tbl.* FROM (SELECT * FROM `place`) es_qry_tbl, (SELECT @slno:=0) es_tbl"
            type => "place"
            add_field => { "queryFunctionName" => "getAllDataFromFirstTable" }
            use_column_value => true
            tracking_column => "aut_es_1"
        }
    
        jdbc {
            # The path to our downloaded jdbc driver
            jdbc_driver_library => "/mysql-connector-java-5.1.44-bin.jar"
            jdbc_driver_class => "com.mysql.jdbc.Driver"
            # MySQL JDBC connection string to our database, YOUR_DATABASE_NAME
            jdbc_connection_string => "jdbc:mysql://localhost:3306/test"
            # The user we wish to execute our statement as
            jdbc_user => "root"
            jdbc_password => ""
            schedule => "* * * * *"
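            # Number every row with a MySQL user variable; the generated aut_es_2
            # column is used as the tracking column and in the document id below.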
            statement => "SELECT  @slno:=@slno+1 aut_es_2, es_qry_tbl.* FROM (SELECT * FROM `things`) es_qry_tbl, (SELECT @slno:=0) es_tbl"
            type => "things"
            add_field => { "queryFunctionName" => "getAllDataFromSecondTable" }
            use_column_value => true
            tracking_column => "aut_es_2"
        } 
    }
    
    # install uuid plugin 'bin/logstash-plugin install logstash-filter-uuid'
    # The uuid filter allows you to generate a UUID and add it as a field to each processed event.
    
    filter {
    
        # Build the document id from the row number generated by the matching
        # jdbc input; re-running a query then updates documents in place
        # instead of creating duplicates.
        if [type] == "place" {
            mutate { add_field => { "[@metadata][document_id]" => "%{aut_es_1}" } }
        }
        if [type] == "things" {
            mutate { add_field => { "[@metadata][document_id]" => "%{aut_es_2}" } }
        }
    
        uuid {
            target    => "uuid"
            overwrite => true
        }
    }
    
    output {
        stdout {codec => rubydebug}
        if [type] == "place" {
            elasticsearch {
                hosts => "localhost:9200"
                index => "test_index_1_12"
                #document_id => "%{aut_es_1}"
                document_id => "%{[@metadata][document_id]}"
            }
        }
        if [type] == "things" {
            elasticsearch {
                hosts => "localhost:9200"
                index => "test_index_2_13"
                document_id => "%{[@metadata][document_id]}"
                # document_id => "%{aut_es_2}"
                # You can set document_id explicitly; otherwise Elasticsearch will generate a unique id.
            }
        }
    }
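    
    As written, both statements re-read the whole table on every scheduled run.
    If your tables have an auto-increment primary key, each input can instead be
    made incremental with the jdbc plugin's :sql_last_value placeholder together
    with the tracking column. The block below is only a sketch of the first jdbc
    input (it still goes inside the input section): it assumes a hypothetical
    auto-increment `id` column on `place`, and last_run_metadata_path is an
    arbitrary example path; adjust both to your schema and environment.
    
        # Incremental variant (sketch): fetch only rows added since the last run.
        jdbc {
            jdbc_driver_library => "/mysql-connector-java-5.1.44-bin.jar"
            jdbc_driver_class => "com.mysql.jdbc.Driver"
            jdbc_connection_string => "jdbc:mysql://localhost:3306/test"
            jdbc_user => "root"
            jdbc_password => ""
            schedule => "* * * * *"
            # :sql_last_value is replaced with the last tracked id on each run
            statement => "SELECT * FROM `place` WHERE id > :sql_last_value ORDER BY id"
            use_column_value => true
            tracking_column => "id"
            tracking_column_type => "numeric"
            last_run_metadata_path => "/tmp/logstash_jdbc_place_last_run"
            type => "place"
        }
    
    With that in place you can use "%{id}" as the document_id in the output so
    re-indexed rows overwrite the same document. Start the pipeline with
    bin/logstash -f your_config.conf.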
    
