#!/bin/bash

# Build the project
dune build

# Example 1: Convert a single .rec file to stdout
echo "Converting single file to stdout:"
dune exec owntracks2clickhouse -- avsm/avsm-ip15/2025-08.rec | head -5

# Example 2: Convert a single .rec file to an output file
echo -e "\nConverting single file to single_output.jsonl:"
dune exec owntracks2clickhouse -- avsm/avsm-ip15/2025-08.rec -o single_output.jsonl
echo "Created single_output.jsonl"

# Example 3: Process all .rec files in a directory recursively
echo -e "\nProcessing all .rec files recursively:"
dune exec owntracks2clickhouse -- avsm -r -o all_records.jsonl

# Example 4: Create a ClickHouse table and import the data
cat << 'EOF'
To import into ClickHouse, create a table like this:

CREATE TABLE owntracks_locations (
    timestamp DateTime64(3),
    timestamp_epoch UInt32,
    point Point,
    latitude Float64,
    longitude Float64,
    altitude Nullable(Float64),
    accuracy Nullable(Float64),
    battery Nullable(UInt8),
    tracker_id Nullable(String)
) ENGINE = MergeTree()
ORDER BY (tracker_id, timestamp);

Then import the JSON Lines file:

clickhouse-client --query="INSERT INTO owntracks_locations FORMAT JSONEachRow" < all_records.jsonl

Or use clickhouse-local for testing:

clickhouse-local --query="
    SELECT
        tracker_id,
        toDate(timestamp) AS date,
        count() AS points,
        round(avg(battery), 2) AS avg_battery
    FROM file('all_records.jsonl', 'JSONEachRow')
    GROUP BY tracker_id, date
    ORDER BY date DESC
    LIMIT 10
"
EOF
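
# Example 5 (sketch): stream records straight into ClickHouse, skipping the
# intermediate JSONL file. This is an assumption, not part of the tool's
# documented workflow: it relies on the stdout behaviour shown in Example 1
# and presumes a ClickHouse server is reachable locally and the
# owntracks_locations table above has already been created.
#
# echo -e "\nStreaming records directly into ClickHouse:"
# dune exec owntracks2clickhouse -- avsm -r \
#   | clickhouse-client --query="INSERT INTO owntracks_locations FORMAT JSONEachRow"
#
# Hypothetical sanity check after the import (row count should match the
# number of lines in all_records.jsonl):
# clickhouse-client --query="SELECT count() FROM owntracks_locations"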