Commit 19b51f67 authored by neop's avatar neop
Browse files

split including empty strings and gracefully parse doubles

parent df9f1688
......@@ -75,35 +75,46 @@ class DayUploader {
}
private static void put_data_into_influxdb(File saved_csv_file, Sensor sensor) throws FileNotFoundException, IOException {
var tag_columns = List.of("sensor_id", "sensor_type", "location");
var example_parsing_exception = new Object(){ NumberFormatException e = null; };
try (var csv = new BufferedReader(new FileReader(saved_csv_file))) {
var index_of_field = get_index_of_field_map(csv.readLine());
var tag_columns = Arrays.asList("sensor_id", "sensor_type", "location");
var field_columns = new HashSet<>(index_of_field.keySet());
field_columns.remove("timestamp");
field_columns.removeAll(tag_columns);
csv.lines()
.map(line -> line.split(";"))
.map(line -> line.split(";", -1))
.forEach(data -> {
var point = Point.measurement("sensor")
.time(Util.parseAnyDateTime(data[index_of_field.get("timestamp")]), WritePrecision.NS);
for (var tag_column : tag_columns) {
point.addTag(tag_column, (String) sensor.properties.get(tag_column));
for (var tag : tag_columns) {
point.addTag(tag, (String) sensor.properties.get(tag));
}
for (var field_column : field_columns) {
point.addField(field_column, data[index_of_field.get(field_column)]);
for (var field : field_columns) {
try {
point.addField(field, Double.parseDouble(data[index_of_field.get(field)]));
} catch (NumberFormatException e) {
example_parsing_exception.e = e;
}
}
Main.influxdb_write_api.writePoint(point);
});
}
);
}
if (example_parsing_exception.e != null) {
Main.glogger.fine("there were lines with columns where I could not parse double from string \"%s\" due to for example " + example_parsing_exception.e);
}
Main.influxdb_write_api.flush();
}
private static Map<String, Integer> get_index_of_field_map(String header_line) throws IOException {
var header = header_line.split(";");
var header = header_line.split(";", -1);
var index_of_field = new HashMap<String, Integer>();
for (int i = 0; i < header.length; ++i) {
index_of_field.put(header[i], i);
......
......@@ -109,17 +109,17 @@ public class Main {
}
private static void run() throws Exception {
System.out.println("starting influx query");
var tables = influxDBClient.getQueryApi().query("from(bucket:\"bucket0\") |> range(start: -10000d1h, stop: -1d)");
// System.out.println("starting influx query");
// var tables = influxDBClient.getQueryApi().query("from(bucket:\"bucket0\") |> range(start: -10000d1h, stop: -1d)");
for (var fluxTable : tables) {
System.out.println("table " + fluxTable);
for (var record : fluxTable.getRecords()) {
System.out.println(String.format("%s %s: %s %s", record.getTime(), record.getMeasurement(), record.getField(), record.getValue()));
record.getValues().forEach((key, value) -> System.out.println(key + ":" + value));
}
}
System.out.println("finished influx query");
// for (var fluxTable : tables) {
// System.out.println("table " + fluxTable);
// for (var record : fluxTable.getRecords()) {
// System.out.println(String.format("%s %s: %s %s", record.getTime(), record.getMeasurement(), record.getField(), record.getValue()));
// record.getValues().forEach((key, value) -> System.out.println(key + ":" + value));
// }
// }
// System.out.println("finished influx query");
try_uploading_failed_sensors();
upload_all_days();
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment