From 62c729b63b31a47fbf77b0681bf664573a8e0792 Mon Sep 17 00:00:00 2001 From: PaulVua Date: Mon, 10 Feb 2025 17:25:34 +0100 Subject: [PATCH] update --- NPM/get_data_modbus.py | 60 ++++++++++++++++++------ NPM/get_data_v2.py | 2 +- config.json.dist | 1 + html/database.html | 93 ++++++++++++++++++++++++++++++++------ html/launcher.php | 17 +++++-- master.py | 25 ++++++---- sqlite/read.py | 7 +++ sqlite/read_select_date.py | 59 ++++++++++++++++++++++++ 8 files changed, 223 insertions(+), 41 deletions(-) create mode 100644 sqlite/read_select_date.py diff --git a/NPM/get_data_modbus.py b/NPM/get_data_modbus.py index 98838f7..db30a25 100755 --- a/NPM/get_data_modbus.py +++ b/NPM/get_data_modbus.py @@ -1,12 +1,19 @@ ''' + _ _ ____ __ __ + | \ | | _ \| \/ | + | \| | |_) | |\/| | + | |\ | __/| | | | + |_| \_|_| |_| |_| + Script to get NPM data via Modbus need parameter: port -/usr/bin/python3 /var/www/nebuleair_pro_4g/NPM/get_data_modbus.py ttyAMA5 +/usr/bin/python3 /var/www/nebuleair_pro_4g/NPM/get_data_modbus.py Modbus RTU [Slave Address][Function Code][Starting Address][Quantity of Registers][CRC] Pour récupérer les 5 cannaux (a partir du registre 0x80) +Donnée actualisée toutes les 10 secondes Request \x01\x03\x00\x80\x00\x0A\xE4\x1E @@ -24,13 +31,28 @@ import requests import json import sys import crcmod +import sqlite3 -parameter = sys.argv[1:] # Exclude the script name -#print("Parameters received:") -port='/dev/'+parameter[0] +# Connect to the SQLite database +conn = sqlite3.connect("/var/www/nebuleair_pro_4g/sqlite/sensors.db") +cursor = conn.cursor() + +def load_config(config_file): + try: + with open(config_file, 'r') as file: + config_data = json.load(file) + return config_data + except Exception as e: + print(f"Error loading config file: {e}") + return {} + +# Load the configuration data +config_file = '/var/www/nebuleair_pro_4g/config.json' +config = load_config(config_file) +npm_solo_port = config.get('NPM_solo_port', '') #port du NPM solo ser = serial.Serial( 
- port=port, + port=npm_solo_port, baudrate=115200, parity=serial.PARITY_EVEN, stopbits=serial.STOPBITS_ONE, @@ -51,15 +73,20 @@ crc_high = (crc >> 8) & 0xFF # Append CRC to the frame request = data + bytes([crc_low, crc_high]) -print(f"Request frame: {request.hex()}") +#print(f"Request frame: {request.hex()}") -ser.write(request) +ser.write(request) + +#GET RTC TIME from SQLite +cursor.execute("SELECT * FROM timestamp_table LIMIT 1") +row = cursor.fetchone() # Get the first (and only) row +rtc_time_str = row[1] # '2025-02-07 12:30:45' while True: try: byte_data = ser.readline() formatted = ''.join(f'\\x{byte:02x}' for byte in byte_data) - print(formatted) + #print(formatted) # Extract LSW (first 2 bytes) and MSW (last 2 bytes) lsw_channel1 = int.from_bytes(byte_data[3:5], byteorder='little') @@ -82,12 +109,18 @@ while True: msw_chanel5 = int.from_bytes(byte_data[21:23], byteorder='little') raw_value_channel5 = (msw_chanel5 << 16) | lsw_channel5 - print(f"Channel 1 (0.2->0.5): {raw_value_channel1}") - print(f"Channel 2 (0.5->1.0): {raw_value_channel2}") - print(f"Channel 3 (1.0->2.5): {raw_value_channel3}") - print(f"Channel 4 (2.5->5.0): {raw_value_channel4}") - print(f"Channel 5 (5.0->10.): {raw_value_channel5}") + #print(f"Channel 1 (0.2->0.5): {raw_value_channel1}") + #print(f"Channel 2 (0.5->1.0): {raw_value_channel2}") + #print(f"Channel 3 (1.0->2.5): {raw_value_channel3}") + #print(f"Channel 4 (2.5->5.0): {raw_value_channel4}") + #print(f"Channel 5 (5.0->10.): {raw_value_channel5}") + cursor.execute(''' + INSERT INTO data_NPM_5channels (timestamp,PM_ch1, PM_ch2, PM_ch3, PM_ch4, PM_ch5) VALUES (?,?,?,?,?,?)''' + , (rtc_time_str,raw_value_channel1,raw_value_channel2,raw_value_channel3,raw_value_channel4,raw_value_channel5)) + + # Commit and close the connection + conn.commit() break @@ -101,3 +134,4 @@ while True: time.sleep(3) exit() +conn.close() # NOTE(review): unreachable — exit() above terminates the script first; move this before exit() diff --git a/NPM/get_data_v2.py b/NPM/get_data_v2.py index 4609ce8..e38e8f8 100644 --- a/NPM/get_data_v2.py +++ 
b/NPM/get_data_v2.py @@ -5,7 +5,7 @@ | |\ | __/| | | | |_| \_|_| |_| |_| -Script to get NPM values +Script to get NPM values (PM1, PM2.5 and PM10) PM and the sensor temp/hum And store them inside sqlite database Uses RTC module for timing (from SQLite db) diff --git a/config.json.dist b/config.json.dist index db51d2c..5ff1daa 100755 --- a/config.json.dist +++ b/config.json.dist @@ -4,6 +4,7 @@ "boot_log": true, "modem_config_mode": false, "NPM/get_data_v2.py": true, + "NPM/get_data_modbus.py":false, "loop/SARA_send_data_v2.py": true, "RTC/save_to_db.py": true, "BME280/get_data_v2.py": true, diff --git a/html/database.html b/html/database.html index 568bf15..a1b4c07 100644 --- a/html/database.html +++ b/html/database.html @@ -58,8 +58,21 @@
Consulter la base de donnée
- - + +
+ + +
+ + + + + +
@@ -68,8 +81,18 @@
Télécharger les données
- - + +
+ + + + +
+ + + + +
@@ -123,11 +146,6 @@ - - - - - window.onload = function() { fetch('../config.json') // Replace 'deviceID.txt' with 'config.json' .then(response => response.json()) // Parse response as JSON @@ -168,11 +186,21 @@ // TABLE PM -function get_data_sqlite(table, limit, download) { - console.log("Getting data for table mesure PM"); +function get_data_sqlite(table, limit, download , startDate = "", endDate = "") { + console.log(`Getting data for table: ${table}, limit: ${limit}, download: ${download}, start: ${startDate}, end: ${endDate}`); + // Construct URL parameters dynamically + let url = `launcher.php?type=table_mesure&table=${table}&limit=${limit}&download=${download}`; + + // Add date parameters if downloading + if (download) { + url += `&start_date=${startDate}&end_date=${endDate}`; + } + + console.log(url); + $.ajax({ - url: 'launcher.php?type=table_mesurePM&table='+table+'&limit='+limit+'&download='+download, + url: url, dataType: 'text', // Specify that you expect a JSON response method: 'GET', // Use GET or POST depending on your needs success: function(response) { @@ -207,6 +235,16 @@ function get_data_sqlite(table, limit, download) { Humidity (%) Pressure (hPa) `; + } else if (table === "data_NPM_5channels") { + tableHTML += ` + Timestamp + PM_ch1 (nb/L) + PM_ch2 (nb/L) + PM_ch3 (nb/L) + PM_ch4 (nb/L) + PM_ch5 (nb/L) + + `; } tableHTML += ``; @@ -233,6 +271,17 @@ function get_data_sqlite(table, limit, download) { ${columns[3]} `; } + else if (table === "data_NPM_5channels") { + tableHTML += ` + ${columns[0]} + ${columns[1]} + ${columns[2]} + ${columns[3]} + ${columns[4]} + ${columns[5]} + + `; + } tableHTML += ""; }); @@ -250,6 +299,19 @@ function get_data_sqlite(table, limit, download) { } + +function getSelectedLimit() { + return document.getElementById("records_limit").value; +} + +function getStartDate() { + return document.getElementById("start_date").value || "2025-01-01"; // Default to a safe date +} + +function getEndDate() { + return 
document.getElementById("end_date").value || "2025-12-31"; // Default to a safe date +} + function downloadCSV(response, table) { let rows = response.trim().split("\n"); @@ -257,9 +319,12 @@ function downloadCSV(response, table) { // Add headers based on table type if (table === "data_NPM") { - csvContent += "Timestamp,PM1,PM2.5,PM10,Temperature (°C),Humidity (%)\n"; + csvContent += "TimestampUTC,PM1,PM2.5,PM10,Temperature_sensor,Humidity_sensor\n"; } else if (table === "data_BME280") { - csvContent += "Timestamp,Temperature (°C),Humidity (%),Pressure (hPa)\n"; + csvContent += "TimestampUTC,Temperature (°C),Humidity (%),Pressure (hPa)\n"; + } + else if (table === "data_NPM_5channels") { + csvContent += "TimestampUTC,PM_ch1,PM_ch2,PM_ch3,PM_ch4,PM_ch5\n"; } // Format rows as CSV diff --git a/html/launcher.php b/html/launcher.php index 6bb7186..4dbb49a 100755 --- a/html/launcher.php +++ b/html/launcher.php @@ -203,14 +203,23 @@ if ($type == "BME280") { } -if ($type == "table_mesurePM") { +if ($type == "table_mesure") { $table=$_GET['table']; $limit=$_GET['limit']; $download=$_GET['download']; - $command = 'sudo /usr/bin/python3 /var/www/nebuleair_pro_4g/sqlite/read.py '.$table.' '.$limit; - $output = shell_exec($command); - echo $output; + if ($download==="false") { + $command = 'sudo /usr/bin/python3 /var/www/nebuleair_pro_4g/sqlite/read.py '.$table.' '.$limit; + $output = shell_exec($command); + echo $output; + } else{ + $start_date=$_GET['start_date']; + $end_date=$_GET['end_date']; + $command = 'sudo /usr/bin/python3 /var/www/nebuleair_pro_4g/sqlite/read_select_date.py '.$table.' '.$start_date.' 
'.$end_date; // SECURITY(review): GET params ($table, $start_date, $end_date) go straight into a shell command — wrap each in escapeshellarg() + $output = shell_exec($command); + echo $output; + } + } # SARA R4 COMMANDS diff --git a/master.py b/master.py index 805995b..ee80591 100644 --- a/master.py +++ b/master.py @@ -64,26 +64,33 @@ def load_config(): with open(CONFIG_FILE, "r") as f: return json.load(f) -def run_script(script_name, interval): - """Run a script in a loop with a delay.""" +def run_script(script_name, interval, delay=0): + """Run a script in a synchronized loop with an optional start delay.""" script_path = os.path.join(SCRIPT_DIR, script_name) # Build full path + next_run = time.monotonic() + delay # Apply the initial delay + while True: config = load_config() if config.get(script_name, True): # Default to True if not found subprocess.run(["python3", script_path]) - time.sleep(interval) + + # Wait until the next exact interval + next_run += interval + sleep_time = max(0, next_run - time.monotonic()) # Prevent negative sleep times + time.sleep(sleep_time) # Define scripts and their execution intervals (seconds) SCRIPTS = [ - ("NPM/get_data_v2.py", 60), # Get NPM data every 60s - ("loop/SARA_send_data_v2.py", 60), # Send data every 60 seconds - ("RTC/save_to_db.py", 1), # SAVE RTC time every 1 second - ("BME280/get_data_v2.py", 120) # Get BME280 data every 120 seconds + ("RTC/save_to_db.py", 1, 0), # SAVE RTC time every 1 second, no delay + ("NPM/get_data_v2.py", 60, 0), # Get NPM data every 60s, no delay + ("NPM/get_data_modbus.py", 10, 2), # Get NPM data (modbus 5 channels) every 10s, with 2s delay + ("loop/SARA_send_data_v2.py", 60, 1), # Send data every 60 seconds, with 1s delay + ("BME280/get_data_v2.py", 120, 0) # Get BME280 data every 120 seconds, no delay ] # Start threads for enabled scripts -for script_name, interval in SCRIPTS: - thread = threading.Thread(target=run_script, args=(script_name, interval), daemon=True) +for script_name, interval, delay in SCRIPTS: + thread = threading.Thread(target=run_script, args=(script_name, interval, delay), daemon=True) 
thread.start() # Keep the main script running diff --git a/sqlite/read.py b/sqlite/read.py index 6a283ad..39732dd 100755 --- a/sqlite/read.py +++ b/sqlite/read.py @@ -8,6 +8,13 @@ Script to read data from a sqlite database /usr/bin/python3 /var/www/nebuleair_pro_4g/sqlite/read.py data_NPM 10 +Available tables are +data_NPM +data_NPM_5channels +data_BME280 +data_envea +timestamp_table + ''' import sqlite3 diff --git a/sqlite/read_select_date.py b/sqlite/read_select_date.py new file mode 100644 index 0000000..7ac6535 --- /dev/null +++ b/sqlite/read_select_date.py @@ -0,0 +1,59 @@ +''' + ____ ___ _ _ _ + / ___| / _ \| | (_) |_ ___ + \___ \| | | | | | | __/ _ \ + ___) | |_| | |___| | || __/ + |____/ \__\_\_____|_|\__\___| + +Script to read data from a sqlite database using start date and end date + +/usr/bin/python3 /var/www/nebuleair_pro_4g/sqlite/read_select_date.py data_NPM 2025-02-09 2025-02-11 + +Available tables are +data_NPM +data_NPM_5channels +data_BME280 +data_envea +timestamp_table + +''' + +import sqlite3 +import sys + +parameter = sys.argv[1:] # Exclude the script name +#print("Parameters received:") +table_name=parameter[0] +start_date=parameter[1] +end_date=parameter[2] + +# Convert to full timestamp range +start_timestamp = f"{start_date} 00:00:00" +end_timestamp = f"{end_date} 23:59:59" + +# Connect to the SQLite database +conn = sqlite3.connect("/var/www/nebuleair_pro_4g/sqlite/sensors.db") +cursor = conn.cursor() + +# Retrieve the last 10 sensor readings +#cursor.execute("SELECT * FROM data_NPM ORDER BY timestamp DESC LIMIT 10") +#cursor.execute("SELECT * FROM data_BME280 ORDER BY timestamp DESC LIMIT 10") +#cursor.execute("SELECT * FROM timestamp_table") +if table_name == "timestamp_table": + cursor.execute("SELECT * FROM timestamp_table") + +else: + query = f"SELECT * FROM {table_name} WHERE timestamp BETWEEN ? AND ? 
ORDER BY timestamp ASC" # SECURITY(review): table_name is interpolated unparameterized — validate it against the known table list before querying + cursor.execute(query, (start_timestamp, end_timestamp)) + + +rows = cursor.fetchall() +rows.reverse() # NOTE(review): the query already sorts ASC, so this reverse yields DESCENDING output — drop it or use DESC in the query + + +# Display the results +for row in rows: + print(row) + +# Close the database connection +conn.close()