Telegraf - multiple inputs, multiple outputs - InfluxDB

I want to write each input to its own database in InfluxDB (e.g. input1 --> DB1, input2 --> DB2).
This is my telegraf.conf:
# OUTPUT PLUGINS #
[[outputs.influxdb]]
urls = ["http://172.18.0.2:8086"]
database = "shellyem"
namepass = ["db1"]
# OUTPUT PLUGINS #
[[outputs.influxdb]]
urls = ["http://172.18.0.2:8086"]
database = "shell"
namepass = ["db2"]
# INPUT PLUGINS #
[[inputs.db1]]
urls = [
"http://192.168.1.191/emeter/0",
]
timeout = "1s"
data_format = "json"
# INPUT PLUGINS #
[[inputs.db2]]
urls = [
"http://192.168.1.192/emeter/0",
]
timeout = "1s"
data_format = "json"
It doesn't work, and I don't understand how namepass works. Can you help me? Thank you.

It's simpler than it looks: tag the metrics on the input side and filter on those tags on the output side. Each input below tags its metrics with an influxdb_tag value, and each output uses tagpass to accept only the metrics carrying the matching value, so each device ends up in its own database.
Copy and paste the config below:
[[outputs.influxdb]]
urls = ["http://172.18.0.2:8086"]
database = "Mirko"
[outputs.influxdb.tagpass]
influxdb_tag = ["Mirko"]
[[outputs.influxdb]]
urls = ["http://172.18.0.2:8086"]
database = "Simone"
[outputs.influxdb.tagpass]
influxdb_tag = ["Simone"]
[[inputs.http]]
urls = [
"http://192.168.1.191/emeter/0",
"http://192.168.1.191/emeter/1"
]
data_format = "json"
[inputs.http.tags]
influxdb_tag = "Mirko"
[[inputs.http]]
urls = [
"http://192.168.1.201/emeter/0",
"http://192.168.1.201/emeter/1"
]
data_format = "json"
[inputs.http.tags]
influxdb_tag = "Simone"

Related

Different buckets in InfluxDB from Telegraf

I've tried to set up a second bucket, but everything that goes into the existing bucket is also being put into the new bucket. Why?
[[outputs.influxdb_v2]]
urls = ["http://127.0.0.1:8086"]
token = "token=="
organization = "mini31"
bucket = "zigbee"
[[inputs.mqtt_consumer]]
servers = ["tcp://127.0.0.1:1883"]
topics = [
"zigbee2mqtt/Home/+/Temp",
]
data_format = "json_v2"
[[inputs.mqtt_consumer.json_v2]]
measurement_name = "temperature"
[[inputs.mqtt_consumer.topic_parsing]]
topic = "zigbee2mqtt/Home/+/Temp"
tags = "_/_/room/_"
[[inputs.mqtt_consumer.json_v2.field]]
path = "temperature"
type = "float"
# this is supposed to be the new bucket, but it's receiving everything from zigbee.
[[outputs.influxdb_v2]]
urls = ["http://127.0.0.1:8086"]
token = "token=="
organization = "mini31"
bucket = "solar"
[[inputs.mqtt_consumer]]
servers = ["tcp://127.0.0.1:1883"]
topics = [
"solar/inverter"
]
data_format = "json_v2"
[[inputs.mqtt_consumer.json_v2]]
measurement_name = "generation"
[[inputs.mqtt_consumer.json_v2.field]]
path = "ppv"
rename = "generation"
type = "int"
How can I keep the existing zigbee stuff going into its bucket but stop it from also going into the new bucket?
Attempt 2
[[outputs.influxdb_v2]]
urls = ["http://127.0.0.1:8086"]
token = "token=="
organization = "mini31"
bucket = ""
bucket_tag = "bucket"
[[inputs.mqtt_consumer]]
servers = ["tcp://127.0.0.1:1883"]
topics = [
"zigbee2mqtt/Home/+/Temp",
]
data_format = "json_v2"
exclude_bucket_tag = true
[inputs.mqtt_consumer.tags]
bucket = "zigbee"
[[inputs.mqtt_consumer.json_v2]]
measurement_name = "temperature"
[[inputs.mqtt_consumer.topic_parsing]]
topic = "zigbee2mqtt/Home/+/Temp"
tags = "_/_/room/_"
[[inputs.mqtt_consumer.json_v2.field]]
path = "temperature"
type = "float"
[[inputs.mqtt_consumer]]
servers = ["tcp://127.0.0.1:1883"]
topics = [
"solar/inverter"
]
data_format = "json_v2"
exclude_bucket_tag = true
[inputs.mqtt_consumer.tags]
bucket = "solar"
[[inputs.mqtt_consumer.json_v2]]
measurement_name = "generation"
[[inputs.mqtt_consumer.json_v2.field]]
path = "ppv"
rename = "generation"
type = "int"
And now telegraf fails to start with the meaningless message:
E! [telegraf] Error running agent: error loading config file /etc/telegraf/telegraf.conf: plugin inputs.mqtt_consumer: line 1415: configuration specified the fields ["exclude_bucket_tag"], but they weren't used
Which still happens if I comment out exclude_bucket_tag.
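One thing stands out: every output receives every metric unless it is filtered (namepass/tagpass) or routed, which is why the first attempt wrote the zigbee data into both buckets. As for the error, exclude_bucket_tag appears to be an option of the outputs.influxdb_v2 plugin (where it strips the routing tag before the write), not of inputs.mqtt_consumer, which would explain the "weren't used" message. A sketch of Attempt 2 with that option moved into the output; the two [[inputs.mqtt_consumer]] sections keep their bucket = "zigbee" / bucket = "solar" tags and simply drop exclude_bucket_tag:
[[outputs.influxdb_v2]]
  urls = ["http://127.0.0.1:8086"]
  token = "token=="
  organization = "mini31"
  bucket_tag = "bucket"        # route each metric to the bucket named in its "bucket" tag
  exclude_bucket_tag = true    # drop the routing tag before writing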

Artillery + Playwright, StatsD data not being ingested in InfluxDB correctly by Telegraf (Template not working)

I have some tests written using Artillery + Playwright and I am using the publish-metrics plugin with type influxdb-statsd. I then have the following telegraf.conf:
[[outputs.influxdb_v2]]
urls = ["http://${INFLUX_DB2_HOST_ADDRESS}:8086"]
token = "${INFLUX_DB2_TOKEN}"
organization = "${INFLUX_DB2_ORGANIZATION}"
bucket = "${INFLUX_DB2_BUCKET}"
[[inputs.statsd]]
protocol = "udp"
max_tcp_connections = 250
tcp_keep_alive = false
service_address = ":8125"
delete_gauges = true
delete_counters = true
delete_sets = true
delete_timings = true
metric_separator = "_"
parse_data_dog_tags = true
datadog_extensions = true
datadog_distributions = false
Data from Artillery is sent to StatsD in this format:
artillery.browser.page.FID.compliance-hub_dashboard.min:3.2|g
artillery.browser.page.FID.compliance-hub_dashboard.max:3.2|g
artillery.browser.page.FID.compliance-hub_dashboard.count:2|g
artillery.browser.page.FID.compliance-hub_dashboard.p50:3.2|g
I would like to set up a Telegraf template so that, in InfluxDB, artillery.browser.page.FID.compliance-hub_dashboard is a measurement and min, max, count and p50 are fields.
How do I do that?
I tried:
templates = [
"measurement.measurement.measurement.measurement.measurement.field",
]
but it's not working. :(
What I see in InfluxDB is a measurement named artillery_browser_page_FID_compliance-hub_dashboard_min with a single field value = 3.2.
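For reference, templates is an option of the statsd input itself (the same graphite-style template syntax), so it has to sit inside the [[inputs.statsd]] section. A minimal sketch of that placement, reusing the listener settings from the question; this only shows where the option goes, and whether the template alone produces the measurement/field split described above is not guaranteed:
[[inputs.statsd]]
  protocol = "udp"
  service_address = ":8125"
  metric_separator = "_"
  ## split dotted statsd names: the first five parts form the measurement, the last part becomes the field key
  templates = [
    "measurement.measurement.measurement.measurement.measurement.field",
  ]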

Telegraf http listener v2: unable to send JSON with string values

I'm trying to send this very simple JSON string to Telegraf to be saved into InfluxDB:
{ "id": "id_123", "value": 10 }
So the request would be this: curl -i -XPOST 'http://localhost:8080/telegraf' --data-binary '{"id": "id_123","value": 10}'
When I make that request, I get the following answer: HTTP/1.1 204 No Content Date: Tue, 20 Apr 2021 13:02:49 GMT. But when I check what was written to the database, there is only the value field:
select * from http_listener_v2
time host influxdb_database value
---- ---- ----------------- -----
1618923747863479914 my.host.com my_db 10
What am I doing wrong?
Here's my Telegraf config:
[global_tags]
[agent]
interval = "10s"
round_interval = true
metric_batch_size = 1000
metric_buffer_limit = 10000
collection_jitter = "0s"
flush_interval = "10s"
flush_jitter = "0s"
precision = ""
hostname = ""
omit_hostname = false
# OUTPUTS
[[outputs.influxdb]]
urls = ["http://127.0.0.1:8086"]
database = "telegraf"
username = "xxx"
password = "xxx"
[outputs.influxdb.tagdrop]
influxdb_database = ["*"]
[[outputs.influxdb]]
urls = ["http://127.0.0.1:8086"]
database = "httplistener"
username = "xxx"
password = "xxx"
[outputs.influxdb.tagpass]
influxdb_database = ["httplistener"]
# INPUTS
## system
[[inputs.cpu]]
percpu = true
totalcpu = true
collect_cpu_time = false
report_active = false
[[inputs.disk]]
ignore_fs = ["tmpfs", "devtmpfs", "devfs", "iso9660", "overlay", "aufs", "squashfs"]
[[inputs.mem]]
[[inputs.swap]]
[[inputs.system]]
## http listener
[[inputs.http_listener_v2]]
service_address = ":8080"
path = "/telegraf"
methods = ["POST", "PUT"]
data_source = "body"
data_format = "json"
[inputs.http_listener_v2.tags]
influxdb_database = "httplistener"
Use json_string_fields = ["id"]. By default the json data format only turns numeric JSON values into fields; string values are ignored unless they are listed in json_string_fields (or promoted to tags via tag_keys).
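A sketch of the listener section with that option added, keeping the settings from the question:
[[inputs.http_listener_v2]]
  service_address = ":8080"
  path = "/telegraf"
  methods = ["POST", "PUT"]
  data_source = "body"
  data_format = "json"
  json_string_fields = ["id"]   # keep "id" as a string field instead of dropping it
  [inputs.http_listener_v2.tags]
    influxdb_database = "httplistener"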

Bazel: print the versions/tags/hashes for all dependencies of a target

I am running into a build error for a Bazel target. I have checked the code and I could not find anything wrong. I suspect I might be looking at the wrong version of the code. Is there a way to print out the tags/versions/hashes of all the code packages a target depends on?
bazel query 'deps(//my:target)' --nohost_deps --noimplicit_deps --output=build
This prints, in BUILD file format, the targets of all explicit dependencies of the given target. Here's example output from running that command in a real project:
# /home/user/code/rules_jvm_external/tests/integration/BUILD:12:1
java_test(
name = "GlobalArtifactExclusionsTest",
deps = ["#global_exclusion_testing//:com_diffplug_durian_durian_core", "#global_exclusion_testing//:com_google_guava_guava", "#global_exclusion_testing//:com_squareup_okhttp3_okhttp", "#maven//:org_hamcrest_hamcrest", "#maven//:org_hamcrest_hamcrest_core"],
srcs = ["//tests/integration:GlobalArtifactExclusionsTest.java"],
test_class = "com.jvm.external.GlobalArtifactExclusionsTest",
)
# /home/user/.cache/bazel/_bazel_user/8484bc4fff18ee4a905b69a9ddb0e143/external/maven/BUILD:103:1
jvm_import(
name = "org_hamcrest_hamcrest_core",
tags = ["maven_coordinates=org.hamcrest:hamcrest-core:2.1"],
jars = ["#maven//:v1/https/jcenter.bintray.com/org/hamcrest/hamcrest-core/2.1/hamcrest-core-2.1.jar"],
deps = ["#maven//:org_hamcrest_hamcrest"],
)
# /home/user/.cache/bazel/_bazel_user/8484bc4fff18ee4a905b69a9ddb0e143/external/maven/BUILD:115:1
jvm_import(
name = "org_hamcrest_hamcrest",
tags = ["maven_coordinates=org.hamcrest:hamcrest:2.1"],
jars = ["#maven//:v1/https/jcenter.bintray.com/org/hamcrest/hamcrest/2.1/hamcrest-2.1.jar"],
deps = [],
)
# /home/user/.cache/bazel/_bazel_user/8484bc4fff18ee4a905b69a9ddb0e143/external/global_exclusion_testing/BUILD:79:1
jvm_import(
name = "com_squareup_okhttp3_okhttp",
tags = ["maven_coordinates=com.squareup.okhttp3:okhttp:3.14.1"],
jars = ["#global_exclusion_testing//:v1/https/repo1.maven.org/maven2/com/squareup/okhttp3/okhttp/3.14.1/okhttp-3.14.1.jar"],
deps = ["#global_exclusion_testing//:com_squareup_okio_okio"],
)
# /home/user/.cache/bazel/_bazel_user/8484bc4fff18ee4a905b69a9ddb0e143/external/global_exclusion_testing/BUILD:91:1
jvm_import(
name = "com_squareup_okio_okio",
tags = ["maven_coordinates=com.squareup.okio:okio:1.17.2"],
jars = ["#global_exclusion_testing//:v1/https/repo1.maven.org/maven2/com/squareup/okio/okio/1.17.2/okio-1.17.2.jar"],
deps = [],
)
# /home/user/.cache/bazel/_bazel_user/8484bc4fff18ee4a905b69a9ddb0e143/external/global_exclusion_testing/BUILD:52:1
jvm_import(
name = "com_google_guava_guava",
tags = ["maven_coordinates=com.google.guava:guava:27.0-jre"],
jars = ["#global_exclusion_testing//:v1/https/repo1.maven.org/maven2/com/google/guava/guava/27.0-jre/guava-27.0-jre.jar"],
deps = ["#global_exclusion_testing//:com_google_guava_listenablefuture", "#global_exclusion_testing//:com_google_code_findbugs_jsr305", "#global_exclusion_testing//:com_google_guava_failureaccess", "#global_exclusion_testing//:com_google_errorprone_error_prone_annotations", "#global_exclusion_testing//:org_checkerframework_checker_qual"],
)
# /home/user/.cache/bazel/_bazel_user/8484bc4fff18ee4a905b69a9ddb0e143/external/global_exclusion_testing/BUILD:102:1
jvm_import(
name = "org_checkerframework_checker_qual",
tags = ["maven_coordinates=org.checkerframework:checker-qual:2.5.2"],
jars = ["#global_exclusion_testing//:v1/https/repo1.maven.org/maven2/org/checkerframework/checker-qual/2.5.2/checker-qual-2.5.2.jar"],
deps = [],
)
# /home/user/.cache/bazel/_bazel_user/8484bc4fff18ee4a905b69a9ddb0e143/external/global_exclusion_testing/BUILD:68:1
jvm_import(
name = "com_google_guava_listenablefuture",
tags = ["maven_coordinates=com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava"],
jars = ["#global_exclusion_testing//:v1/https/repo1.maven.org/maven2/com/google/guava/listenablefuture/9999.0-empty-to-avoid-conflict-with-guava/listenablefuture-9999.0-empty-to-avoid-conflict-with-guava.jar"],
deps = [],
)
# /home/user/.cache/bazel/_bazel_user/8484bc4fff18ee4a905b69a9ddb0e143/external/global_exclusion_testing/BUILD:41:1
jvm_import(
name = "com_google_guava_failureaccess",
tags = ["maven_coordinates=com.google.guava:failureaccess:1.0"],
jars = ["#global_exclusion_testing//:v1/https/repo1.maven.org/maven2/com/google/guava/failureaccess/1.0/failureaccess-1.0.jar"],
deps = [],
)
# /home/user/.cache/bazel/_bazel_user/8484bc4fff18ee4a905b69a9ddb0e143/external/global_exclusion_testing/BUILD:30:1
jvm_import(
name = "com_google_errorprone_error_prone_annotations",
tags = ["maven_coordinates=com.google.errorprone:error_prone_annotations:2.2.0"],
jars = ["#global_exclusion_testing//:v1/https/repo1.maven.org/maven2/com/google/errorprone/error_prone_annotations/2.2.0/error_prone_annotations-2.2.0.jar"],
deps = [],
)
# /home/user/.cache/bazel/_bazel_user/8484bc4fff18ee4a905b69a9ddb0e143/external/global_exclusion_testing/BUILD:19:1
jvm_import(
name = "com_google_code_findbugs_jsr305",
tags = ["maven_coordinates=com.google.code.findbugs:jsr305:3.0.2"],
jars = ["#global_exclusion_testing//:v1/https/repo1.maven.org/maven2/com/google/code/findbugs/jsr305/3.0.2/jsr305-3.0.2.jar"],
deps = [],
)
# /home/user/.cache/bazel/_bazel_user/8484bc4fff18ee4a905b69a9ddb0e143/external/global_exclusion_testing/BUILD:8:1
jvm_import(
name = "com_diffplug_durian_durian_core",
tags = ["maven_coordinates=com.diffplug.durian:durian-core:1.2.0"],
jars = ["#global_exclusion_testing//:v1/https/repo1.maven.org/maven2/com/diffplug/durian/durian-core/1.2.0/durian-core-1.2.0.jar"],
deps = [],
)

How to regex a list of URL paths?

I have a list of url paths:
WHITELIST_PATHS = [ '/assets', '/images', '/javascripts']
How can regex be used to do something like:
allow_access = WHITELIST_PATHS.include? '/assets/application.css'
The idea being that the tested path just needs to start with one of the whitelisted paths. Ideas? Thanks.
allow_access = WHITELIST_PATHS.any? {|p| '/assets/application.css'.start_with? p }
WHITELIST_PATHS = [ '/assets', '/images', '/javascripts']
# probably should be
# WHITELIST_PATHS = [ '/assets/', '/images/', '/javascripts/']
WHITELIST_REGEXP = /^(#{WHITELIST_PATHS.join("|")})/
allow_access = !!('/assets/application.css' =~ WHITELIST_REGEXP)
