author | Rasmus Luha <rasmus.luha@ut.ee> | 2025-05-11 15:43:31 +0300 |
---|---|---|
committer | Rasmus Luha <rasmus.luha@ut.ee> | 2025-05-11 15:43:31 +0300 |
commit | 137b8a988f77e957feed698494f7143ac06a7b51 (patch) | |
tree | d330750cc63ebbe38a0187002c7cd637eb3f687e /modules/telegraf/telegraf_utils.py | |
parent | 9a332ab302a2f05ce4924a66647dec03fb3b1366 (diff) | |
add diffJsonRootPath template to telegraf
Diffstat (limited to 'modules/telegraf/telegraf_utils.py')
-rw-r--r-- | modules/telegraf/telegraf_utils.py | 70 |
1 file changed, 65 insertions, 5 deletions
diff --git a/modules/telegraf/telegraf_utils.py b/modules/telegraf/telegraf_utils.py
index 62bc4ea..2e72fbf 100644
--- a/modules/telegraf/telegraf_utils.py
+++ b/modules/telegraf/telegraf_utils.py
@@ -17,8 +17,6 @@ def modify_input(new_pipeline_path, key, value):
 ##modify_input("templates/basic_ETL.toml", "test_pipers.toml, "urls", ["stillTesting"])
 
 
-
-## TODO
 def modify_agent(new_pipeline_path, key, value):
     data = toml.load(new_pipeline_path)
     pluggin = data["agent"]
@@ -34,9 +32,6 @@ def modify_agent(new_pipeline_path, key, value):
         toml.dump(data, f)
 
 
-
-
-## TODO
 def modify_output(new_pipeline_path, key, value):
     data = toml.load(new_pipeline_path)
     pluggin = data["outputs"]["influxdb"][0]
@@ -50,3 +45,68 @@ def modify_output(new_pipeline_path, key, value):
     with open(new_pipeline_path, "w") as f:
         toml.dump(data, f)
 
+
+
+
+
+### different_jsonPaths_ETL template funcs ###
+
+
+#def modify_processorsConventer(new_pipeline_path, key, value):
+#    data = toml.load(new_pipeline_path)
+#    #print(data)
+#    pluggin = data["processors"]["converter"][0]["fields"]
+#    print(pluggin)
+#
+#    if key in pluggin:
+#        pluggin[key] = value
+#        with open(new_pipeline_path, "w") as f:
+#            toml.dump(data, f)
+#
+#
+#def modify_processorsRename(new_pipeline_path, key, value):
+#    data = toml.load(new_pipeline_path)
+#    pluggin = data["processors"]["rename"][0]["replace"][0]
+#    print(pluggin)
+#    pluggin = data["processors"]["rename"][0]["replace"][1]
+#    print(pluggin)
+#
+#    if key in pluggin:
+#        pluggin[key] = value
+#        with open(new_pipeline_path, "w") as f:
+#            toml.dump(data, f)
+#
+
+
+
+
+
+
+
+### ChatGPT was used in the procesess of creating this function
+## def add_new_replace_block(new_pipeline_name):
+##
+##     new_block = """  [[processors.rename.replace]]
+##        field = "placeholder"
+##        dest = "placeholder"
+##     """
+##
+##     with open(new_pipeline_name, "r") as file:
+##         lines = file.readlines()
+##
+##     # Find the last occurrence of '[[processors.rename.replace]]'
+##     insert_index = -1
+##     for i, line in enumerate(lines):
+##         if line.strip().startswith("[[processors.rename.replace]]"):
+##             insert_index = i
+##
+##     while insert_index + 1 < len(lines) and lines[insert_index + 1].startswith("    "):
+##         insert_index += 1
+##
+##     # Insert the new block
+##     lines.insert(insert_index + 1, new_block + "\n")
+##
+##     with open(new_pipeline_name, "w") as file:
+##         file.writelines(lines)
+##
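
Note: the commented-out helpers added above edit the Telegraf pipeline's [[processors.rename.replace]] blocks by inserting raw text into the file. Below is a minimal sketch of the same idea done through the toml library the existing helpers already use, so the new stanza is added structurally rather than by string insertion. The function name append_replace_block and the template path in the usage comment are hypothetical illustrations, not part of this commit.

import toml


def append_replace_block(pipeline_path, field, dest):
    """Append one [[processors.rename.replace]] entry and write the file back."""
    data = toml.load(pipeline_path)

    # Telegraf represents repeated [[processors.rename]] / [[processors.rename.replace]]
    # stanzas as lists of tables; create the nesting if the template lacks it.
    rename = data.setdefault("processors", {}).setdefault("rename", [{}])[0]
    replace_list = rename.setdefault("replace", [])
    replace_list.append({"field": field, "dest": dest})

    with open(pipeline_path, "w") as f:
        toml.dump(data, f)


# Hypothetical usage against a different_jsonPaths_ETL-style template:
# append_replace_block("templates/different_jsonPaths_ETL.toml", "old_field", "new_field")

Working on the parsed data keeps the rest of the template intact and avoids the index-tracking needed when splicing lines into the file by hand.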