Commit 7a5a234: line continuation reformat glue strings
tomwilsonsco committed Sep 13, 2024
1 parent 310aa16 commit 7a5a234
Showing 12 changed files with 132 additions and 151 deletions.
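The pattern applied throughout this commit replaces glue calls that pass several string fragments joined with .sep = " " by a single string that uses glue's backslash line continuation: a trailing \\ inside the string drops the newline, so the message still renders on one line, with the explicit space before each \\ acting as the separator. A minimal sketch of the two styles, using illustrative values rather than anything from the package:

library(glue)

schema <- "dbo"          # illustrative only
table_name <- "my_table" # illustrative only

# Old style: several fragments joined with .sep = " "
old_msg <- glue(
  "Checked existing columns in",
  "{schema}.{table_name}",
  "are compatible with those in the dataframe to be loaded.",
  .sep = " "
)

# New style: one string; the trailing \\ continues the line so no newline is
# inserted, and the space before each \\ keeps the words separated
new_msg <- glue(
  "Checked existing columns in \\
  {schema}.{table_name} \\
  are compatible with those in the dataframe to be loaded."
)

# Both should render as the same single-line message:
# "Checked existing columns in dbo.my_table are compatible with those in the dataframe to be loaded."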
97 changes: 43 additions & 54 deletions R/create_replace_table.R
@@ -138,20 +138,19 @@ check_existing_table <- function(db_params,
resize_datatypes(compare_col_df, db_params)

message(glue::glue(
"Checked existing columns in ",
"{db_params$schema}.{db_params$table_name}",
"are compatible with those in the dataframe to be loaded.",
.sep = " "
"Checked existing columns in \\
{db_params$schema}.{db_params$table_name} \\
are compatible with those in the dataframe to be loaded."
))
}


alter_sql_character_col <- function(db_params,
column_name,
new_char_type) {
- sql <- glue::glue_sql("ALTER TABLE ",
- "{`quoted_schema_tbl(db_params$schema, db_params$table_name)`} ",
- "ALTER COLUMN {`column_name`} {DBI::SQL(new_char_type)};",
+ sql <- glue::glue_sql("ALTER TABLE \\
+ {`quoted_schema_tbl(db_params$schema, db_params$table_name)`} \\
+ ALTER COLUMN {`column_name`} {DBI::SQL(new_char_type)};",
.con = DBI::ANSI()
)

@@ -162,9 +161,9 @@ alter_sql_character_col <- function(db_params,
FALSE
)
message(glue::glue(
"Resizing column {column_name}",
"to {new_char_type}."
), .sep = " ")
"Resizing column {column_name} \\
to {new_char_type}."
))
}

id_col_name <- function(table_name) {
@@ -177,9 +176,9 @@ sql_create_table <- function(schema, table_name, metadata_df) {
metadata_df$column_name != paste0(table_name, "ID"),
]

- initial_sql <- glue::glue_sql("CREATE TABLE ",
- "{`quoted_schema_tbl(schema, table_name)`} (",
- "{`id_col_name(table_name)`} INT NOT NULL IDENTITY PRIMARY KEY,",
+ initial_sql <- glue::glue_sql("CREATE TABLE \\
+ {`quoted_schema_tbl(schema, table_name)`} (\\
+ {`id_col_name(table_name)`} INT NOT NULL IDENTITY PRIMARY KEY,",
.con = DBI::ANSI()
)

@@ -203,11 +202,11 @@ sql_versioned_table <- function(sql, db_params) {
)
# The versioned table sql
glue::glue_sql(sql,
" \"SysStartTime\" DATETIME2 GENERATED ALWAYS AS ROW START NOT NULL, ",
"\"SysEndTime\" DATETIME2 GENERATED ALWAYS AS ROW END NOT NULL, ",
"PERIOD FOR SYSTEM_TIME (SysStartTime, SysEndTime)) ",
"WITH (SYSTEM_VERSIONING = ON (HISTORY_TABLE = ",
"{`history_table`}));",
" \"SysStartTime\" DATETIME2 GENERATED ALWAYS AS ROW START NOT NULL, \\
\"SysEndTime\" DATETIME2 GENERATED ALWAYS AS ROW END NOT NULL, \\
PERIOD FOR SYSTEM_TIME (SysStartTime, SysEndTime)) \\
WITH (SYSTEM_VERSIONING = ON (HISTORY_TABLE = \\
{`history_table`}));",
.con = DBI::ANSI()
)
}
@@ -245,9 +244,8 @@ create_staging_table <- function(db_params, dataframe) {
FALSE
)
message(glue::glue(
"Table: {db_params$schema}.{staging_name}",
"successfully created in database.",
.sep = " "
"Table: {db_params$schema}.{staging_name} \\
successfully created in database."
))
}

@@ -276,9 +274,8 @@ populate_staging_table <- function(db_params,
)
batch_list <- get_df_batches(dataframe = dataframe, batch_size = batch_size)
message(glue::glue(
"Loading to staging in {length(batch_list$batch_starts)}",
"batches of up to {format(batch_size, scientific = FALSE)} rows...",
.sep = " "
"Loading to staging in {length(batch_list$batch_starts)} \\
batches of up to {format(batch_size, scientific = FALSE)} rows..."
))
for (i in seq_along(batch_list$batch_starts)) {
batch_start <- batch_list$batch_starts[[i]]
@@ -296,17 +293,15 @@ populate_staging_table <- function(db_params,
},
error = function(cond) {
stop(glue::glue(
"Failed to write staging",
"data to database.\n", cond,
.sep = " "
"Failed to write staging \\
data to database.\n", cond
), call. = FALSE)
}
)
message(glue::glue(
"Loaded rows {format(batch_start, scientific = FALSE)}",
"- {format(batch_end, scientific = FALSE)} of",
"{tail(batch_list$batch_ends, 1)}",
.sep = " "
"Loaded rows {format(batch_start, scientific = FALSE)} \\
- {format(batch_end, scientific = FALSE)} of \\
{tail(batch_list$batch_ends, 1)}"
))
}
DBI::dbDisconnect(connection)
@@ -317,10 +312,10 @@ create_insert_sql <- function(db_params, metadata_df) {
paste0(db_params$table_name, "ID"), ]

[GitHub Actions / lint] file=R/create_replace_table.R,line=312,col=4,[indentation_linter] Indentation should be 31 spaces but is 4 spaces.

glue::glue_sql(
"INSERT INTO ",
"{`quoted_schema_tbl(db_params$schema, db_params$table_name)`} ",
"({`metadata_df$column_name`*}) select {`metadata_df$column_name`*} from ",
"{`quoted_schema_tbl(db_params$schema,
"INSERT INTO \\
{`quoted_schema_tbl(db_params$schema, db_params$table_name)`} \\
({`metadata_df$column_name`*}) select {`metadata_df$column_name`*} from \\
{`quoted_schema_tbl(db_params$schema,
paste0(db_params$table_name,'_staging_'))`};",
.con = DBI::ANSI()
)
@@ -338,9 +333,8 @@ populate_table_from_staging <- function(db_params) {

execute_sql(db_params$server, db_params$database, sql, FALSE)
message(glue::glue(
"Table: {db_params$schema}.{db_params$table_name}",
"successfully populated from staging",
.sep = " "
"Table: {db_params$schema}.{db_params$table_name} \\
successfully populated from staging"
))
}

@@ -357,9 +351,8 @@ delete_staging_table <- function(db_params, silent = FALSE) {
)
if (!silent) {
message(glue::glue(
"Staging table: {db_params$schema}.{db_params$table_name}_staging_",
"successfully deleted from database.",
.sep = " "
"Staging table: {db_params$schema}.{db_params$table_name}_staging_ \\
successfully deleted from database."
))
}
}
@@ -390,9 +383,8 @@ create_table <- function(db_params, silent = FALSE) {
)
if (!silent) {
message(glue::glue(
"Table: {db_params$schema}.{db_params$table_name}",
"successfully created in database",
.sep = " "
"Table: {db_params$schema}.{db_params$table_name} \\
successfully created in database"
))
}
}
@@ -407,10 +399,9 @@ clean_table_name <- function(table_name) {
# Advise if changing target table name
if (new_name != table_name) {
warning(glue::glue(
"Cannot name a table {table_name}",
"replacing with name {new_name}",
"(see ODBC table name limitations)",
.sep = " "
"Cannot name a table {table_name} \\
replacing with name {new_name} \\
(see ODBC table name limitations)"
))
}
return(new_name)
@@ -553,9 +544,8 @@ write_dataframe_to_db <- function(server,
# If not appending and exists then inform that will be overwritten
} else {
warning(glue::glue(
"Database table: {schema}.{table_name} already exists",
"attempting to drop and replace it...",
.sep = " "
"Database table: {schema}.{table_name} already exists \\
attempting to drop and replace it..."
), call. = FALSE)
# Drop the existing table
drop_table_from_db(
@@ -587,8 +577,7 @@ write_dataframe_to_db <- function(server,
}
end_time <- Sys.time()
message(glue::glue(
"Loading completed in",
"{round(difftime(end_time, start_time,units = 'mins')[[1]], 2)} minutes.",
.sep = " "
"Loading completed in \\
{round(difftime(end_time, start_time,units = 'mins')[[1]], 2)} minutes."
))
}
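The glue_sql() calls in this file follow the same continuation style. As a small, self-contained sketch of how the interpolation behaves against DBI::ANSI(): backtick-wrapped expressions are quoted as identifiers, DBI::SQL() values are inserted verbatim, and the trailing \\ keeps the statement on one logical line. The names below are invented for illustration and the package's quoted_schema_tbl() helper is left out:

library(glue)
library(DBI)

table_name <- "my_table"         # illustrative only
column_name <- "comments"        # illustrative only
new_char_type <- "NVARCHAR(255)" # illustrative only

# Interpolate into one ALTER TABLE statement; {`x`} is identifier-quoted,
# {SQL(x)} is inserted as raw SQL, and \\ joins the two source lines
sql <- glue_sql("ALTER TABLE {`table_name`} \\
  ALTER COLUMN {`column_name`} {SQL(new_char_type)};",
  .con = DBI::ANSI()
)

sql
# Expected to print roughly:
# <SQL> ALTER TABLE "my_table" ALTER COLUMN "comments" NVARCHAR(255);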
41 changes: 20 additions & 21 deletions R/db_table_metadata.R
@@ -1,20 +1,20 @@
# basic column name, datatype and length query
col_query <- function(database, schema, table_name) {
glue::glue_sql("
- SELECT column_name, data_type, CHARACTER_MAXIMUM_LENGTH
- FROM INFORMATION_SCHEMA.COLUMNS
- WHERE TABLE_CATALOG = {database}
- AND TABLE_SCHEMA = {schema}
+ SELECT column_name, data_type, CHARACTER_MAXIMUM_LENGTH \\
+ FROM INFORMATION_SCHEMA.COLUMNS \\
+ WHERE TABLE_CATALOG = {database} \\
+ AND TABLE_SCHEMA = {schema} \\
AND TABLE_NAME = {table_name}", .con = DBI::ANSI())
}


update_col_query <- function(columns_info) {
# To add the length of nvarchar column so appears as e.g. nvarchar(50)
update_char <- glue::glue(
"{columns_info[! is.na(columns_info$CHARACTER_MAXIMUM_LENGTH), 2]}",
"({as.character(columns_info",
"[! is.na(columns_info$CHARACTER_MAXIMUM_LENGTH), 3])})"
"{columns_info[! is.na(columns_info$CHARACTER_MAXIMUM_LENGTH), 2]}\\
({as.character(columns_info\\
[! is.na(columns_info$CHARACTER_MAXIMUM_LENGTH), 3])})"
)

columns_info$data_type[!is.na(columns_info$CHARACTER_MAXIMUM_LENGTH)] <-
@@ -32,27 +32,26 @@ get_table_stats <- function(i, columns_info, schema, table_name) {

# Generate the min/max query based on the data type
min_max_query <- if (data_type != "bit") {

[GitHub Actions / lint] file=R/db_table_metadata.R,line=34,col=3,[object_usage_linter] local variable 'min_max_query' assigned but may not be used
- glue::glue_sql("(SELECT MIN(CAST({`col`} AS NVARCHAR(225)))
- FROM {`schema`}.{`table_name`}
- WHERE {col} IS NOT NULL) AS minimum_value,
- (SELECT MAX(CAST({`col`} AS NVARCHAR(225)))
- FROM {`schema`}.{`table_name`}
- WHERE {col} IS NOT NULL) AS maximum_value",
+ glue::glue_sql("(SELECT MIN(CAST({`col`} AS NVARCHAR(225))) \\
+ FROM {`schema`}.{`table_name`} \\
+ WHERE {col} IS NOT NULL) AS minimum_value, \\
+ (SELECT MAX(CAST({`col`} AS NVARCHAR(225))) \\
+ FROM {`schema`}.{`table_name`} \\
+ WHERE {col} IS NOT NULL) AS maximum_value",
.con = DBI::ANSI()
)
} else {
glue::glue_sql("NULL AS minimum_value, NULL AS maximum_value")
}

# Building the full SQL query
- glue::glue_sql("
- SELECT {col} AS column_name, {data_type} AS data_type,
- (SELECT COUNT(*) FROM {`schema`}.{`table_name`}) AS row_count,
- (SELECT COUNT(*) FROM {`schema`}.{`table_name`} WHERE {col} IS NULL)
- AS null_count,
- (SELECT COUNT(DISTINCT {`col`}) FROM {`schema`}.{`table_name`}
- WHERE {col} IS NOT NULL) AS distinct_values,
- {min_max_query}",
+ glue::glue_sql(
+ "SELECT {col} AS column_name, {data_type} AS data_type, \\
[GitHub Actions / lint] file=R/db_table_metadata.R,line=49,col=0,[indentation_linter] Indentation should be 4 spaces but is 0 spaces.
+ (SELECT COUNT(*) FROM {`schema`}.{`table_name`}) AS row_count, \\
+ (SELECT COUNT(*) FROM {`schema`}.{`table_name`} WHERE {col} IS NULL) \\
+ AS null_count, \\
+ (SELECT COUNT(DISTINCT {`col`}) FROM {`schema`}.{`table_name`} \\
+ WHERE {col} IS NOT NULL) AS distinct_values, {min_max_query}",
.con = DBI::ANSI()
)
}
22 changes: 10 additions & 12 deletions R/drop_table.R
@@ -1,10 +1,10 @@
# create versioned table sql
create_drop_sql_versioned <- function(schema, table_name) {
history_table <- quoted_schema_tbl(schema, glue::glue(table_name, "History"))

[GitHub Actions / lint] file=R/drop_table.R,line=3,col=3,[object_usage_linter] local variable 'history_table' assigned but may not be used
- glue::glue_sql("ALTER TABLE {`quoted_schema_tbl(schema, table_name)`} ",
- "SET ( SYSTEM_VERSIONING = OFF );",
- "DROP TABLE {`quoted_schema_tbl(schema, table_name)`};",
- "DROP TABLE {`history_table`};",
+ glue::glue_sql("ALTER TABLE {`quoted_schema_tbl(schema, table_name)`} \\
+ SET ( SYSTEM_VERSIONING = OFF ); \\
+ DROP TABLE {`quoted_schema_tbl(schema, table_name)`}; \\
+ DROP TABLE {`history_table`};",
.con = DBI::ANSI()
)
}
@@ -104,10 +104,10 @@ drop_table_from_db <- function(server,
error = function(cond) {
if (drop_sql$versioned) {
cond$message <- glue::glue(
"{cond$message}\n\n",
"{schema}.{table_name} is a VERSIONED TABLE.\n\n",
"Contact a system admin to request that they drop this versioned ",
"table for you as you do not have sufficient permissions.",
"{cond$message}\n\n \\
{schema}.{table_name} is a VERSIONED TABLE.\n\n \\
Contact a system admin to request that they drop this versioned \\
table for you as you do not have sufficient permissions.",
)
} else {
cond$message <- glue::glue("Error dropping table: {cond}")
@@ -119,9 +119,7 @@ drop_table_from_db <- function(server,

# Output message if required
if (!silent) {
- message(glue::glue("Table: {schema}.{table_name}",
- "successfully deleted.",
- .sep = " "
- ))
+ message(glue::glue("Table: {schema}.{table_name} \\
+ successfully deleted."))
}
}
13 changes: 5 additions & 8 deletions R/read_table.R
@@ -42,8 +42,8 @@ table_select_list <- function(server,
# Cast datetime2 columns to datetime- workaround due to old ODBC client drivers
col_select <- function(column_name, datetime2_cols_to_cast) {
if (column_name %in% datetime2_cols_to_cast) {
- return(glue::glue_sql("CAST({`column_name`} AS datetime) ",
- "AS {`column_name`}",
+ return(glue::glue_sql("CAST({`column_name`} AS datetime) \\
+ AS {`column_name`}",
.con = DBI::ANSI()
))
} else {
@@ -86,13 +86,10 @@ create_read_sql <- function(schema,
)
if (!is.null(filter_stmt)) {
filter_stmt <- format_filter(filter_stmt)
- glue::glue(
- initial_sql,
- "WHERE {filter_stmt};",
- .sep = " "
- )
+ glue::glue_sql(glue::glue(initial_sql, " WHERE {filter_stmt};"),
+ .con = DBI::ANSI())
} else {
- glue::glue(initial_sql, ";")
+ glue::glue_sql(glue::glue(initial_sql, ";"), .con = DBI::ANSI())
}
}
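Beyond re-gluing the message strings, the filtered branch of create_read_sql() above no longer returns a plain glue string: the interpolated statement is now passed through glue_sql() with .con = DBI::ANSI(), presumably so both branches return the same kind of DBI SQL object. A small sketch of that composition with made-up inputs:

library(glue)
library(DBI)

initial_sql <- 'SELECT "Name", "Age" FROM "dbo"."people"'  # made-up example input
filter_stmt <- "Age > 30"                                  # made-up example filter

# glue() interpolates the WHERE clause; glue_sql() then wraps the result as SQL
glue_sql(glue(initial_sql, " WHERE {filter_stmt};"), .con = DBI::ANSI())
# Expected to print roughly:
# <SQL> SELECT "Name", "Age" FROM "dbo"."people" WHERE Age > 30;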

22 changes: 11 additions & 11 deletions R/show_schema_tables.R
@@ -1,18 +1,18 @@
table_sql <- function(schema) {
- glue::glue_sql("SELECT name AS 'table',
- create_date AS 'creation_date'
- FROM sys.tables
- WHERE SCHEMA_NAME(schema_id) = {schema}
- order by 2", .con = DBI::ANSI())
+ glue::glue_sql("SELECT name AS 'table', \\
+ create_date AS 'creation_date' \\
+ FROM sys.tables \\
+ WHERE SCHEMA_NAME(schema_id) = {schema} \\
+ order by 2", .con = DBI::ANSI())
}

table_view_sql <- function(schema) {
- glue::glue_sql("SELECT name AS 'table',
- type_desc AS 'object_type',
- create_date AS 'creation_date'
- FROM sys.objects
- WHERE type IN ('U', 'V') -- 'U'ser tables, 'V'iews
- AND SCHEMA_NAME(schema_id) = {schema}
+ glue::glue_sql("SELECT name AS 'table', \\
+ type_desc AS 'object_type', \\
+ create_date AS 'creation_date' \\
+ FROM sys.objects \\
+ WHERE type IN ('U', 'V') -- 'U'ser tables, 'V'iews \\
+ AND SCHEMA_NAME(schema_id) = {schema} \\
order by type, name", .con = DBI::ANSI())
}

