
Commit

Easier JSON handling in databases without a native JSON type. SQLPage now detects when you use a JSON function in SQLite or MariaDB to generate a column, and automatically converts the resulting string to a JSON object. This makes it easy to use components that take JSON parameters (such as the new columns component) in MariaDB and SQLite.

fixes #633
lovasoa committed Oct 5, 2024
1 parent cf9812c commit 9e58075
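In practice, the change lets a SQLite or MariaDB query feed a JSON-producing expression straight into a component property. The following is a minimal sketch, not part of this commit: it assumes a SQLite database and a hypothetical `orders` table, and reuses the `item` parameter and `description` key from the test file added below.

```sql
-- The 'columns' component takes one JSON object per item to display.
-- On SQLite and MariaDB, json_object() returns plain TEXT; SQLPage now notices
-- the json_object() call at parse time and converts the returned string back
-- into a JSON value before passing it to the component.
select 'columns' as component;

select json_object('description', 'Orders this month: ' || count(*)) as item
from orders;
```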
Showing 6 changed files with 148 additions and 5 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -4,6 +4,7 @@

- **Fix**: the search feature in the shell component was not working when no menu item was defined.
- Add support for encrypted Microsoft SQL Server connections. This finally allows connecting to databases that refuse clear-text connections, such as those hosted on Azure.
- Easier JSON handling in databases without a native JSON type. SQLPage now detects when you use a JSON function in SQLite or MariaDB to generate a column, and automatically converts the resulting string to a JSON object. This makes it easy to use components that take JSON parameters (such as the new columns component) in MariaDB and SQLite.

## 0.29.0 (2024-09-25)
- New columns component: `columns`. Useful to display a comparison between items, or large key figures to a user.
2 changes: 2 additions & 0 deletions docker-compose.yml
@@ -31,6 +31,7 @@ services:
    environment:
      MYSQL_ROOT_PASSWORD: Password123!
      MYSQL_DATABASE: sqlpage

  mssql:
    profiles: ["mssql"]
    ports: ["1433:1433"]
@@ -41,6 +42,7 @@
      timeout: 3s
      retries: 10
      start_period: 10s

  mariadb:
    profiles: ["mariadb"]
    ports: ["3306:3306"]
32 changes: 31 additions & 1 deletion src/webserver/database/execute_queries.rs
@@ -4,6 +4,7 @@ use futures_util::StreamExt;
use std::borrow::Cow;
use std::collections::HashMap;
use std::pin::Pin;
use serde_json::Value;

use super::csv_import::run_csv_import;
use super::sql::{
@@ -59,6 +60,7 @@ pub fn stream_query_results_with_conn<'a>(
let is_err = elem.is_err();
let mut query_result = parse_single_sql_result(&stmt.query, elem);
apply_delayed_functions(request, &stmt.delayed_functions, &mut query_result).await?;
apply_json_columns(&mut query_result, &stmt.json_columns);
for i in parse_dynamic_rows(query_result) {
yield i;
}
@@ -333,6 +335,34 @@ fn json_to_fn_param(json: serde_json::Value) -> Option<Cow<'static, str>> {
}
}

fn apply_json_columns(item: &mut DbItem, json_columns: &[String]) {
    if let DbItem::Row(Value::Object(ref mut row)) = item {
        for column in json_columns {
            if let Some(value) = row.get_mut(column) {
                if let Value::String(json_str) = value {
                    if let Ok(parsed_json) = serde_json::from_str(json_str) {
                        log::trace!("Parsed JSON column {column}: {parsed_json}");
                        *value = parsed_json;
                    } else {
                        log::warn!("The column {column} contains invalid JSON: {json_str}");
                    }
                } else if let Value::Array(array) = value {
                    for item in array {
                        if let Value::String(json_str) = item {
                            if let Ok(parsed_json) = serde_json::from_str(json_str) {
                                log::trace!("Parsed JSON array item: {parsed_json}");
                                *item = parsed_json;
                            }
                        }
                    }
                }
            } else {
                log::warn!("The column {column} is missing from the result set, so it cannot be converted to JSON.");
            }
        }
    }
}

pub struct StatementWithParams<'a> {
sql: &'a str,
arguments: AnyArguments<'a>,
@@ -355,4 +385,4 @@ impl<'q> sqlx::Execute<'q, Any> for StatementWithParams<'q> {
// Let sqlx create a prepared statement the first time it is executed, and then reuse it.
true
}
}
}
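To make the effect of `apply_json_columns` concrete, here is a sketch (not from the commit) of a single result row before and after the conversion, assuming SQLite and a hypothetical column alias `attrs`:

```sql
-- Query executed against SQLite:
select json_object('name', 'Ada', 'age', 36) as attrs;

-- Row as returned by the driver (SQLite has no native JSON type, so the value is TEXT):
--   {"attrs": "{\"name\":\"Ada\",\"age\":36}"}

-- Row after apply_json_columns, because 'attrs' was detected as a JSON column at parse time:
--   {"attrs": {"name": "Ada", "age": 36}}
```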
105 changes: 104 additions & 1 deletion src/webserver/database/sql.rs
@@ -9,7 +9,7 @@ use async_trait::async_trait;
use sqlparser::ast::{
BinaryOperator, CastKind, CharacterLength, DataType, Expr, Function, FunctionArg,
FunctionArgExpr, FunctionArgumentList, FunctionArguments, Ident, ObjectName,
OneOrManyWithParens, SelectItem, Statement, Value, VisitMut, VisitorMut,
OneOrManyWithParens, SelectItem, SetExpr, Statement, Value, VisitMut, VisitorMut,
};
use sqlparser::dialect::{Dialect, MsSqlDialect, MySqlDialect, PostgreSqlDialect, SQLiteDialect};
use sqlparser::parser::{Parser, ParserError};
@@ -64,6 +64,9 @@ pub(super) struct StmtWithParams {
    /// Functions that are called on the result set after the query has been executed,
    /// and which can be passed the result of the query as an argument.
    pub delayed_functions: Vec<DelayedFunctionCall>,
    /// Columns that contain JSON values and should be converted to JSON objects after the query is executed.
    /// Only relevant for databases that do not have a native JSON type, and which return JSON values as text.
    pub json_columns: Vec<String>,
}

#[derive(Debug)]
@@ -136,6 +139,7 @@ fn parse_single_statement(
query,
params,
delayed_functions: Vec::new(),
json_columns: Vec::new(),
},
});
}
@@ -148,6 +152,7 @@
}
let delayed_functions = extract_toplevel_functions(&mut stmt);
remove_invalid_function_calls(&mut stmt, &mut params);
let json_columns = extract_json_columns(&stmt, db_kind);
let query = format!(
"{stmt}{semicolon}",
semicolon = if semicolon { ";" } else { "" }
@@ -157,6 +162,7 @@
query,
params,
delayed_functions,
json_columns,
}))
}

@@ -771,6 +777,64 @@ fn sqlpage_func_name(func_name_parts: &[Ident]) -> &str {
}
}

fn extract_json_columns(stmt: &Statement, db_kind: AnyKind) -> Vec<String> {
    // Only extract JSON columns for databases without native JSON support
    if matches!(db_kind, AnyKind::Postgres | AnyKind::Mssql) {
        return Vec::new();
    }

    let mut json_columns = Vec::new();

    if let Statement::Query(query) = stmt {
        if let SetExpr::Select(select) = query.body.as_ref() {
            for item in &select.projection {
                if let SelectItem::ExprWithAlias { expr, alias } = item {
                    if is_json_function(expr) {
                        json_columns.push(alias.value.clone());
                        log::trace!("Found JSON column: {alias}");
                    }
                }
            }
        }
    }

    json_columns
}

fn is_json_function(expr: &Expr) -> bool {
    match expr {
        Expr::Function(function) => {
            if let [Ident { value, .. }] = function.name.0.as_slice() {
                [
                    "json_object",
                    "json_array",
                    "json_build_object",
                    "json_build_array",
                    "to_json",
                    "to_jsonb",
                    "json_agg",
                    "jsonb_agg",
                    "json_arrayagg",
                    "json_objectagg",
                    "json_group_array",
                    "json_group_object",
                ]
                .iter()
                .any(|&func| value.eq_ignore_ascii_case(func))
            } else {
                false
            }
        }
        Expr::Cast { data_type, .. } => {
            matches!(data_type, DataType::JSON | DataType::JSONB)
                || (matches!(data_type, DataType::Custom(ObjectName(parts), _) if
                    (parts.len() == 1)
                    && (parts[0].value.eq_ignore_ascii_case("json"))))
        }
        _ => false,
    }
}

#[cfg(test)]
mod test {
use super::super::sqlpage_functions::functions::SqlPageFunctionName;
@@ -1131,4 +1195,43 @@ mod test {
None
);
}

    #[test]
    fn test_extract_json_columns() {
        let sql = r#"
            WITH json_cte AS (
                SELECT json_build_object('a', x, 'b', y) AS cte_json
                FROM generate_series(1, 3) x
                JOIN generate_series(4, 6) y ON true
            )
            SELECT
                json_object('key', 'value') AS json_col1,
                json_array(1, 2, 3) AS json_col2,
                (SELECT json_build_object('nested', subq.val)
                    FROM (SELECT AVG(x) AS val FROM generate_series(1, 5) x) subq
                ) AS json_col3, -- not supported because of the subquery
                CASE
                    WHEN EXISTS (SELECT 1 FROM json_cte WHERE cte_json->>'a' = '2')
                    THEN to_json(ARRAY(SELECT cte_json FROM json_cte))
                    ELSE json_build_array()
                END AS json_col4, -- not supported because of the CASE
                json_unknown_fn(regular_column) AS non_json_col,
                CAST(json_col1 AS json) AS json_col6
            FROM some_table
            CROSS JOIN json_cte
            WHERE json_typeof(json_col1) = 'object'
        "#;

        let stmt = parse_postgres_stmt(sql);
        let json_columns = extract_json_columns(&stmt, AnyKind::Sqlite);

        assert_eq!(
            json_columns,
            vec![
                "json_col1".to_string(),
                "json_col2".to_string(),
                "json_col6".to_string()
            ]
        );
    }
}
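As the test above illustrates, detection is deliberately conservative: only top-level `SELECT` items that are a recognized JSON function call, or a cast to a JSON type, and that carry an explicit alias are marked for conversion. A shorter SQLite-flavoured sketch of the rule (table and column names are hypothetical, not from this commit):

```sql
select
    json_object('a', a) as detected_1,    -- known JSON function with an alias: converted
    cast(raw as json) as detected_2,      -- cast to a JSON type with an alias: converted
    json_object('a', a),                  -- no alias: left as returned by the database
    upper(name) as not_json               -- not a JSON function: left untouched
from t;
```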
6 changes: 6 additions & 0 deletions tests/sql_test_files/it_works_columns_component_json.sql
@@ -0,0 +1,6 @@
select 'columns' as component;

select
JSON_OBJECT('description', 'It works !') as item,
JSON_OBJECT('description', 'It works !') as item
;
7 changes: 4 additions & 3 deletions tests/upload_csv_test.sql
@@ -1,5 +1,6 @@
create table people(name text, age text);
copy people(name, age) from 'people_file' with (format csv, header true);
drop table if exists sqlpage_people_test_table;
create table sqlpage_people_test_table(name text, age text);
copy sqlpage_people_test_table(name, age) from 'people_file' with (format csv, header true);
select 'text' as component,
name || ' is ' || age || ' years old. ' as contents
from people;
from sqlpage_people_test_table;
