input {
  # MQTT dataset
  mqtt {
    host => "test.mosquitto.org"
    port => 1883
    topic => "logstash/plant-data"
    qos => 2                        # QoS 2: exactly-once delivery
  }

  # File dataset
  file {
    path => "/home/kgothatso/dev/in-sight/logstash/data/plant_data.csv"
    start_position => "beginning"
    sincedb_path => "/dev/null"     # do not persist the read position; re-read the file on every run
  }

  # NoSQL dataset
  mongodb {
    uri => 'mongodb://localhost/ahmsdb?ssl=false'
    placeholder_db_dir => 'logstash/data/mongodb'
    placeholder_db_name => 'logstash_mongodb.db'
    collection => 'water_tank'
    batch_size => 5000
  }

  # SQL datasets
  sqlite {
    path => "/home/user/databases/sqlite-tools-linux-x86-3380200/testdb.db"
    type => "test.company"
  }

  jdbc {
    jdbc_driver_library => "drivers/postgresql-42.3.4.jar"
    jdbc_connection_string => "jdbc:postgresql://localhost:5432/frepple"
    jdbc_user => "frepple"
    jdbc_password => "frepple"
    jdbc_driver_class => "org.postgresql.Driver"
    schedule => "* * * * * *"                      # cron-style schedule format (see "Helpful Links")
    statement => "SELECT * FROM public.supplier"   # the PostgreSQL query that retrieves the documents; IMPORTANT: no trailing semicolon!
    jdbc_paging_enabled => "true"
    jdbc_page_size => "300"
  }

  # OPC-UA dataset
  plc4x {
    jobs => {
      job1 => {
        rate => 200
        sources => ["sensor1"]
        queries => {
          PreStage => "ns=2;i=3"
          MidStage => "ns=2;i=4"
          PostStage => "ns=2;i=5"
          ConveyorBeltTimestamp => "ns=2;i=7"
        }
      }
    }
    sources => {
      sensor1 => "opcua:tcp://127.0.0.1:4840/freeopcua/server/"
    }
  }
}

filter {
  # Parse MQTT input
  json {
    source => "message"
  }
  mutate {
    remove_field => [ "message" ]
  }

  # Parse file input
  csv {
    separator => ","
    autodetect_column_names => true
    autogenerate_column_names => true
    columns => ["Time", "Cleanliness", "LPS120_140", "LPS140_160", "LPS20_30", "LPS30_40",
                "LPS40_50", "LPS50_60", "LPS60_90", "LPS90_120", "MV_Grade", "No_Signal",
                "PSP1000M", "INCLUSION_COUNT", "FILTERED_MASS", "INCLUSION_TYPE", "SAMPLE_RESULT"]
  }
  mutate {
    convert => {
      "[Cleanliness]" => "float"
      "[LPS120_140]" => "float"
      "[LPS140_160]" => "float"
      "[LPS20_30]" => "float"
      "[LPS30_40]" => "float"
      "[LPS40_50]" => "float"
      "[LPS50_60]" => "float"
      "[LPS60_90]" => "float"
      "[LPS90_120]" => "float"
      "[MV_Grade]" => "float"
      "[No_Signal]" => "float"
      "[PSP1000M]" => "float"
      "[INCLUSION_COUNT]" => "float"
      "[FILTERED_MASS]" => "float"
    }
    remove_field => [ "message", "path" ]
  }

  # Parse MongoDB dataset
  mutate {
    rename => { "_id" => "id" }
    remove_field => ["_id"]
  }
  json {
    source => "message"
  }

  # The SQL datasets need no parsing: they are tabular, and each row is treated as one observation.

  # Parse OPC-UA dataset
  json {
    source => "values"
  }
}

output {
  in-sight {
    hosts => ["http://127.0.0.1:9200", "http://127.0.0.1:9201", "http://127.0.0.1:9202", "http://127.0.0.1:9203"]
    user => "admin"
    password => "admin"
    index => ["mqtt", "file", "nosql", "sql", "opc-ua"]
  }
  stdout {
    codec => rubydebug
  }
}