Run multiple instances of Logstash

I have this error:

FATAL logstash.runner - Logstash could not be started because there is already another instance using the configured data directory. If you wish to run multiple instances, you must change the "path.data" setting.

Help please.

What is not clear about the error message?

I cannot solve this error.

Either you run one instance of Logstash with all configs, or you need to install separate instances with different data directories.
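
For example, each extra instance has to be started with its own data directory. Roughly like this (the pipeline file names and directories here are only placeholders, adjust them to your setup):

# each Logstash instance must point at a different path.data directory
/usr/share/logstash/bin/logstash -f /etc/logstash/pipeline-a.conf --path.data /usr/share/logstash/data/instance-a
/usr/share/logstash/bin/logstash -f /etc/logstash/pipeline-b.conf --path.data /usr/share/logstash/data/instance-b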

And how can I do that?

What is it you are trying to do? Which operating system are you using? How did you install Logstash?


I use CentOS and I installed Logstash from the RPM package. I have three database sources (SQL Server, PostgreSQL, and an Oracle database) and two CSV files, and I must index them all into Elasticsearch.

This is how I start my Logstash, specifying path.data:

{logstash_home}/bin/logstash -f /usr/share/logstash/kafka-solr-pipeline.conf --config.reload.automatic --path.data /usr/share/logstash/data/1


If you install using the RPM I do not think there is any automatic way to create multiple services, so you may be better off separating the processing flows in your configuration files using conditionals and having a single Logstash instance process them all.


How can I do this?

Update all your config files with tags and associated conditionals so that they can be merged into a single config without issues, and then put them all into the Logstash config directory, which should be /etc/logstash/conf.d by default. Logstash concatenates all the files in that directory into a single pipeline, so without conditionals every event would otherwise pass through every filter and be sent to every output. See the sketch below.
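
For your five sources, the directory could end up looking something like this (the file names are just illustrative, one file per source):

/etc/logstash/conf.d/produit_csv.conf
/etc/logstash/conf.d/contrat_csv.conf
/etc/logstash/conf.d/postgres_clientb2b.conf
/etc/logstash/conf.d/oracle_contact_client.conf
/etc/logstash/conf.d/sqlserver_incident.conf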

Can you help me do it?

input {
  file {
    path => "/etc/logstash/conf.d/Produit.csv"
    start_position => "beginning"
    sincedb_path => "/dev/null"
  }
}
filter {
  csv {
    separator => ";"
    columns => ["ProductId","Name","Description"]
  }
}
output {
  elasticsearch {
    hosts => "192.168.10.150"
    workers => 1
    index => "Produit"
    document_type => "Produit"
  }
  stdout { codec => rubydebug }
}

input {
  file {
    path => "/etc/logstash/conf.d/Contrat.csv"
    start_position => "beginning"
    sincedb_path => "/dev/null"
  }
}
filter {
  csv {
    separator => ";"
    columns => ["......."]
  }
}
output {
  elasticsearch {
    hosts => "192.168.10.150"
    workers => 1
    index => "Contrat"
    document_type => "Contrat"
  }
  stdout { codec => rubydebug }
}

input {
  jdbc {
    jdbc_connection_string => "jdbc:postgresql://192.168.10.100:5432/postgres"
    jdbc_user => "postgres"
    jdbc_validate_connection => true
    jdbc_driver_library => "/etc/logstash/postgresql-42.0.0.jar"
    jdbc_driver_class => "org.postgresql.Driver"
    statement => "SELECT * from clientb2b"
  }
}
output {
  elasticsearch {
    index => "clientbb"
    document_type => "clientbb"
    document_id => "%{telephone}"
    hosts => "192.168.10.150"
  }
}

input {
  jdbc {
    jdbc_driver_library => "/opt/ojdbc6.jar"
    jdbc_driver_class => "Java::oracle.jdbc.driver.OracleDriver"
    jdbc_connection_string => "jdbc:oracle:thin:@192.168.10.137:1521/sv00"
    jdbc_user => "COSWIN"
    jdbc_password => "COSWIN"
    #jdbc_validate_connection => TRUE
    statement => "SELECT * from contact_client"
  }
}
output {
  elasticsearch {
    action => "index"
    hosts => "192.168.10.150:9200"
    index => "client_contact"
    document_type => "client_contact"
  }
  stdout { codec => rubydebug }
}

input {
  jdbc {
    jdbc_driver_library => "/etc/logstash/sqljdbc42.jar"
    jdbc_driver_class => "com.microsoft.sqlserver.jdbc.SQLServerDriver"
    jdbc_connection_string => "jdbc:sqlserver://172.17.1.100;user=sa;password=root;"
    jdbc_user => "sa"
    jdbc_password => "root"
    statement_filepath => "/etc/logstash/conf.d/query.sql"
  }
}
filter {
}
output {
  elasticsearch {
    #protocol => "http"
    index => "incident"
    document_type => "incident"
    document_id => "%{incidentid}"
    hosts => "192.168.10.150"
  }
  stdout { codec => rubydebug }
}

Any help please?

The easiest way is to add a distinct tag to each input using the tags parameter and then use conditionals to ensure that the correct filters and outputs are applied. Something like this:

input {
  jdbc {

    ...
    
    tags =>["jdbc1"]
  }
}

filter {
  if "jdbc1" in [tags] {

    ...
  
  }
}

output {
  if "jdbc1" in [tags] {
    elasticsearch {
  
      ...
  
    }
  }
}
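
Applied to, say, the Produit.csv pipeline you posted, it could look roughly like this. This is just a sketch; note also that Elasticsearch index names must be lowercase, so I have used "produit" for the index here:

input {
  file {
    path => "/etc/logstash/conf.d/Produit.csv"
    start_position => "beginning"
    sincedb_path => "/dev/null"
    # tag events from this input so only the matching filter and output apply to them
    tags => ["produit_csv"]
  }
}

filter {
  if "produit_csv" in [tags] {
    csv {
      separator => ";"
      columns => ["ProductId","Name","Description"]
    }
  }
}

output {
  if "produit_csv" in [tags] {
    elasticsearch {
      hosts => "192.168.10.150"
      index => "produit"
      document_type => "Produit"
    }
  }
}

Repeat the same pattern with a different tag for each of the other CSV and JDBC pipelines, and a single Logstash instance can then run all of them together.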

This topic was automatically closed 28 days after the last reply. New replies are no longer allowed.