Merge branch 'master' into DHIS2-18370_2.42
maikelarabori authored Dec 17, 2024
2 parents d4d7b7b + a46a040 commit 2d14412
Showing 95 changed files with 4,395 additions and 2,452 deletions.
@@ -31,7 +31,6 @@

@NoArgsConstructor(access = lombok.AccessLevel.PRIVATE)
public final class EnrollmentAnalyticsColumnName {

public static final String ENROLLMENT_COLUMN_NAME = "enrollment";
public static final String TRACKED_ENTITY_COLUMN_NAME = "trackedentity";
public static final String ENROLLMENT_DATE_COLUMN_NAME = "enrollmentdate";
@@ -46,5 +46,6 @@ public enum DimensionItemType {
ORGANISATION_UNIT_GROUP,
CATEGORY_OPTION_GROUP,
EXPRESSION_DIMENSION_ITEM,
SUBEXPRESSION_DIMENSION_ITEM
SUBEXPRESSION_DIMENSION_ITEM,
OPTION_SET
}
@@ -35,7 +35,6 @@
import org.hisp.dhis.outboundmessage.OutboundMessageResponse;
import org.hisp.dhis.outboundmessage.OutboundMessageResponseSummary;
import org.hisp.dhis.user.User;
import org.springframework.util.concurrent.ListenableFuture;

/**
* @author Lars Helge Overland
@@ -68,10 +67,6 @@ Future<OutboundMessageResponse> sendMessageAsync(
*/
OutboundMessageResponseSummary sendMessageBatch(OutboundMessageBatch batch);

/** sends message batch asynchronously */
ListenableFuture<OutboundMessageResponseSummary> sendMessageBatchAsync(
OutboundMessageBatch batch);
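The batch-level async method above is removed along with the org.springframework.util.concurrent.ListenableFuture import; the synchronous sendMessageBatch and the single-message sendMessageAsync remain. A caller that still needs a non-blocking batch send could wrap the synchronous call itself; a minimal sketch, assuming a messageSender instance, a batch, and an executor are in scope (none of this is part of the interface):

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executor;

// Wrap the synchronous batch send in a CompletableFuture instead of the
// removed ListenableFuture-based method (illustrative sketch only).
CompletableFuture<OutboundMessageResponseSummary> future =
    CompletableFuture.supplyAsync(() -> messageSender.sendMessageBatch(batch), executor);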

/** To check if given service is configured and ready to use. */
boolean isConfigured();

@@ -49,8 +49,8 @@
import static org.hisp.dhis.analytics.event.data.EnrollmentQueryHelper.getHeaderColumns;
import static org.hisp.dhis.analytics.event.data.EnrollmentQueryHelper.getOrgUnitLevelColumns;
import static org.hisp.dhis.analytics.event.data.EnrollmentQueryHelper.getPeriodColumns;
import static org.hisp.dhis.analytics.table.JdbcEventAnalyticsTableManager.OU_GEOMETRY_COL_SUFFIX;
import static org.hisp.dhis.analytics.table.JdbcEventAnalyticsTableManager.OU_NAME_COL_SUFFIX;
import static org.hisp.dhis.analytics.table.AbstractEventJdbcTableManager.OU_GEOMETRY_COL_SUFFIX;
import static org.hisp.dhis.analytics.table.AbstractEventJdbcTableManager.OU_NAME_COL_SUFFIX;
import static org.hisp.dhis.analytics.util.AnalyticsUtils.replaceStringBetween;
import static org.hisp.dhis.analytics.util.AnalyticsUtils.throwIllegalQueryEx;
import static org.hisp.dhis.analytics.util.AnalyticsUtils.withExceptionHandling;
@@ -375,8 +375,9 @@ private void addDimensionSelectColumns(
singleQuote(period.getIsoDate()) + " as " + period.getPeriodType().getName());
} else {
throw new IllegalStateException(
"Program indicator non-default boundary query must have "
+ "exactly one period, or no periods and a period filter");
"""
Program indicator non-default boundary query must have \
exactly one period, or no periods and a period filter""");
}
});
}
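The replacement message above is a Java text block; the trailing backslash at the end of the first content line suppresses the line break, so the exception message stays a single line and is equivalent to the removed two-part concatenation. A small illustration of that text-block behaviour (not taken from this commit):

// The backslash joins the two lines, so no newline ends up in the string.
String joined =
    """
    exactly one period, \
    or no periods and a period filter""";
// joined.equals("exactly one period, or no periods and a period filter") is true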
@@ -455,7 +456,6 @@ private ColumnAndAlias getColumnAndAlias(
} else if (queryItem.getValueType() == ValueType.NUMBER && !isGroupByClause) {
ColumnAndAlias columnAndAlias =
getColumnAndAlias(queryItem, isAggregated, queryItem.getItemName());

return ColumnAndAlias.ofColumnAndAlias(
columnAndAlias.getColumn(),
defaultIfNull(columnAndAlias.getAlias(), queryItem.getItemName()));
@@ -532,14 +532,10 @@ protected Optional<String> getAlias(QueryItem queryItem) {
@Transactional(readOnly = true, propagation = REQUIRES_NEW)
public Grid getAggregatedEventData(EventQueryParams params, Grid grid, int maxLimit) {
String aggregateClause = getAggregateClause(params);
String columns = StringUtils.join(getSelectColumns(params, true), ",");

String sql =
TextUtils.removeLastComma(
"select "
+ aggregateClause
+ " as value,"
+ StringUtils.join(getSelectColumns(params, true), ",")
+ " ");
TextUtils.removeLastComma("select " + aggregateClause + " as value," + columns + " ");

// ---------------------------------------------------------------------
// Criteria
@@ -45,7 +45,6 @@
import org.hisp.dhis.analytics.table.model.Skip;
import org.hisp.dhis.analytics.table.setting.AnalyticsTableSettings;
import org.hisp.dhis.category.CategoryService;
import org.hisp.dhis.common.IdentifiableObject;
import org.hisp.dhis.common.IdentifiableObjectManager;
import org.hisp.dhis.common.ValueType;
import org.hisp.dhis.dataapproval.DataApprovalLevelService;
@@ -121,9 +120,8 @@ protected String getColumnExpression(ValueType valueType, String columnExpressio
} else if (valueType.isInteger()) {
return getCastExpression(columnExpression, NUMERIC_REGEXP, sqlBuilder.dataTypeBigInt());
} else if (valueType.isBoolean()) {
return String.format(
"case when %1$s = 'true' then 1 when %1$s = 'false' then 0 else null end",
columnExpression);
return sqlBuilder.ifThenElse(
columnExpression + " = 'true'", "1", columnExpression + " = 'false'", "0", "null");
} else if (valueType.isDate()) {
return getCastExpression(columnExpression, DATE_REGEXP, sqlBuilder.dataTypeTimestamp());
} else if (valueType.isGeo() && isSpatialSupport()) {
@@ -147,8 +145,9 @@ protected String getColumnExpression(ValueType valueType, String columnExpressio
*/
protected String getCastExpression(String columnExpression, String filterRegex, String dataType) {
String filter = sqlBuilder.regexpMatch(columnExpression, filterRegex);
return String.format(
"case when %s then cast(%s as %s) else null end", filter, columnExpression, dataType);
String result = String.format("cast(%s as %s)", columnExpression, dataType);

return sqlBuilder.ifThen(filter, result);
}

@Override
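The two hunks above move conditional SQL generation behind the SqlBuilder abstraction instead of hard-coding CASE strings. Judging from the format strings that were removed, a PostgreSQL-flavoured builder would be expected to emit the same expressions as before; a rough sketch of what such helpers could look like (an assumed implementation, not the actual SqlBuilder code in this commit):

// Two-branch conditional, matching the removed boolean expression:
// case when c1 then r1 when c2 then r2 else e end
String ifThenElse(String c1, String r1, String c2, String r2, String elseResult) {
  return String.format(
      "case when %s then %s when %s then %s else %s end", c1, r1, c2, r2, elseResult);
}

// Single-branch conditional; a CASE without an ELSE already yields null,
// so this matches the removed "else null end" form.
String ifThen(String condition, String result) {
  return String.format("case when %s then %s end", condition, result);
}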
@@ -214,6 +213,10 @@ protected List<AnalyticsTableColumn> getColumnForAttribute(TrackedEntityAttribut
*/
private List<AnalyticsTableColumn> getColumnForOrgUnitAttribute(
TrackedEntityAttribute attribute) {
if (!sqlBuilder.supportsCorrelatedSubquery()) {
return List.of();
}

Validate.isTrue(attribute.getValueType().isOrganisationUnit());
List<AnalyticsTableColumn> columns = new ArrayList<>();

@@ -283,24 +286,12 @@ private String getOrgUnitSelectSubquery(TrackedEntityAttribute attribute, String
protected String getAttributeValueJoinClause(Program program) {
String template =
"""
left join ${trackedentityattributevalue} as ${uid} \
left join trackedentityattributevalue as ${uid} \
on en.trackedentityid=${uid}.trackedentityid \
and ${uid}.trackedentityattributeid = ${id}\s""";

return program.getNonConfidentialTrackedEntityAttributes().stream()
.map(attribute -> replaceQualify(template, toVariableMap(attribute)))
.collect(Collectors.joining());
}

/**
* Returns a map of identifiable properties and values.
*
* @param object the {@link IdentifiableObject}.
* @return a {@link Map}.
*/
protected Map<String, String> toVariableMap(IdentifiableObject object) {
return Map.of(
"id", String.valueOf(object.getId()),
"uid", quote(object.getUid()));
}
}
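With the template change above, the attribute value join now names the trackedentityattributevalue table literally instead of going through the ${trackedentityattributevalue} placeholder, while ${uid} and ${id} are still filled in per attribute via toVariableMap. For one attribute the rendered fragment would read roughly as follows, using a made-up uid a1b2c3d4e5f and id 42 (toVariableMap quotes the uid, so the exact quoting depends on the SqlBuilder in use):

left join trackedentityattributevalue as "a1b2c3d4e5f" on en.trackedentityid="a1b2c3d4e5f".trackedentityid and "a1b2c3d4e5f".trackedentityattributeid = 42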
@@ -62,6 +62,7 @@
import org.hisp.dhis.calendar.Calendar;
import org.hisp.dhis.category.CategoryService;
import org.hisp.dhis.common.DimensionalObject;
import org.hisp.dhis.common.IdentifiableObject;
import org.hisp.dhis.common.IdentifiableObjectManager;
import org.hisp.dhis.common.ValueType;
import org.hisp.dhis.commons.collection.ListUtils;
@@ -230,19 +231,25 @@ public void swapTable(AnalyticsTableUpdateParams params, AnalyticsTable table) {
boolean skipMasterTable =
params.isPartialUpdate() && tableExists && table.getTableType().isLatestPartition();

log.info(
"Swapping table, master table exists: '{}', skip master table: '{}'",
tableExists,
skipMasterTable);
log.info("Swapping table: '{}'", table.getMainName());
log.info("Master table exists: '{}', skip master table: '{}'", tableExists, skipMasterTable);

List<Table> swappedPartitions = new UniqueArrayList<>();
table.getTablePartitions().stream()
.forEach(p -> swappedPartitions.add(swapTable(p, p.getMainName())));

if (!sqlBuilder.supportsDeclarativePartitioning()) {
table.getTablePartitions().forEach(part -> swapTable(part, part.getMainName()));
table.getTablePartitions().forEach(part -> swappedPartitions.add(part.fromStaging()));
}

if (!skipMasterTable) {
// Full replace update and main table exist, swap main table
swapTable(table, table.getMainName());
} else {
swappedPartitions.forEach(
partition -> swapInheritance(partition, table.getName(), table.getMainName()));
// Incremental append update, update parent of partitions to existing main table
if (!sqlBuilder.supportsDeclarativePartitioning()) {
swappedPartitions.forEach(
partition -> swapParentTable(partition, table.getName(), table.getMainName()));
}
dropTable(table);
}
}
@@ -289,9 +296,13 @@ public int invokeAnalyticsTableSqlHooks() {
* @param stagingTable the staging table.
* @param mainTableName the main table name.
*/
private Table swapTable(Table stagingTable, String mainTableName) {
executeSilently(sqlBuilder.swapTable(stagingTable, mainTableName));
return stagingTable.swapFromStaging();
private void swapTable(Table stagingTable, String mainTableName) {
if (sqlBuilder.supportsMultiStatements()) {
executeSilently(sqlBuilder.swapTable(stagingTable, mainTableName));
} else {
executeSilently(sqlBuilder.dropTableIfExistsCascade(mainTableName));
executeSilently(sqlBuilder.renameTable(stagingTable, mainTableName));
}
}

/**
@@ -301,8 +312,13 @@ private Table swapTable(Table stagingTable, String mainTableName) {
* @param stagingMasterName the staging master table name.
* @param mainMasterName the main master table name.
*/
private void swapInheritance(Table partition, String stagingMasterName, String mainMasterName) {
executeSilently(sqlBuilder.swapParentTable(partition, stagingMasterName, mainMasterName));
private void swapParentTable(Table partition, String stagingMasterName, String mainMasterName) {
if (sqlBuilder.supportsMultiStatements()) {
executeSilently(sqlBuilder.swapParentTable(partition, stagingMasterName, mainMasterName));
} else {
executeSilently(sqlBuilder.removeParentTable(partition, stagingMasterName));
executeSilently(sqlBuilder.setParentTable(partition, mainMasterName));
}
}
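The two swap methods above now branch on whether the target database can execute several statements in one call; both branches are meant to perform the same work. For the table swap that means dropping the existing main table and renaming the staging table onto its name; swapParentTable likewise detaches the partition from the staging master and attaches it to the main master as two separate calls when multi-statements are unavailable. A sketch of the intent with made-up table names (the actual statements come from the SqlBuilder implementation):

// Multi-statement path, one round trip (sqlBuilder.swapTable), roughly:
//   drop table if exists analytics_event cascade; alter table analytics_event_temp rename to analytics_event;
// Fallback path, the same work issued as two separate statements:
//   sqlBuilder.dropTableIfExistsCascade("analytics_event")
//   sqlBuilder.renameTable(stagingTable, "analytics_event")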

/**
@@ -437,6 +453,18 @@ protected void invokeTimeAndLog(String sql, String logPattern, Object... args) {
log.info("{} in: {}", logMessage, timer.stop().toString());
}

/**
* Returns a map of identifiable properties and values.
*
* @param object the {@link IdentifiableObject}.
* @return a {@link Map}.
*/
protected Map<String, String> toVariableMap(IdentifiableObject object) {
return Map.of(
"id", String.valueOf(object.getId()),
"uid", quote(object.getUid()));
}

/**
* Filters out analytics table columns which were created after the time of the last successful
* resource table update. This so that the create table query does not refer to columns not
@@ -27,6 +27,9 @@
*/
package org.hisp.dhis.analytics.table;

import static org.hisp.dhis.analytics.AnalyticsTableType.ENROLLMENT;
import static org.hisp.dhis.analytics.AnalyticsTableType.EVENT;
import static org.hisp.dhis.analytics.AnalyticsTableType.TRACKED_ENTITY_INSTANCE;
import static org.hisp.dhis.common.collection.CollectionUtils.emptyIfNull;
import static org.hisp.dhis.scheduling.JobProgress.FailurePolicy.SKIP_STAGE;
import static org.hisp.dhis.util.DateUtils.toLongDate;
@@ -48,8 +51,10 @@
import org.hisp.dhis.analytics.table.setting.AnalyticsTableSettings;
import org.hisp.dhis.resourcetable.ResourceTableService;
import org.hisp.dhis.scheduling.JobProgress;
import org.hisp.dhis.setting.SystemSettings;
import org.hisp.dhis.setting.SystemSettingsService;
import org.hisp.dhis.system.util.Clock;
import org.hisp.dhis.tablereplication.TableReplicationService;
import org.springframework.stereotype.Service;

/**
@@ -63,6 +68,8 @@ public class DefaultAnalyticsTableGenerator implements AnalyticsTableGenerator {

private final ResourceTableService resourceTableService;

private final TableReplicationService tableReplicationService;

private final SystemSettingsService settingsService;

private final AnalyticsTableSettings settings;
@@ -73,39 +80,40 @@ public class DefaultAnalyticsTableGenerator implements AnalyticsTableGenerator {

@Override
public void generateAnalyticsTables(AnalyticsTableUpdateParams params0, JobProgress progress) {
Clock clock = new Clock(log).startClock();
Date lastSuccessfulUpdate =
settingsService.getCurrentSettings().getLastSuccessfulAnalyticsTablesUpdate();

Set<AnalyticsTableType> availableTypes =
analyticsTableServices.stream()
.map(AnalyticsTableService::getAnalyticsTableType)
.collect(Collectors.toSet());

AnalyticsTableUpdateParams params =
final Clock clock = new Clock(log).startClock();
final SystemSettings systemSettings = settingsService.getCurrentSettings();
final Date lastSuccessfulUpdate = systemSettings.getLastSuccessfulAnalyticsTablesUpdate();
final AnalyticsTableUpdateParams params =
params0.toBuilder().lastSuccessfulUpdate(lastSuccessfulUpdate).build();
final Set<AnalyticsTableType> skipTypes = emptyIfNull(params.getSkipTableTypes());

log.info("Found {} analytics table types: {}", availableTypes.size(), availableTypes);
log.info("Analytics table update: {}", params);
log.info("Found analytics table types: {}", getAvailableTableTypes());
log.info("Analytics table update params: {}", params);
log.info("Last successful analytics table update: {}", toLongDate(lastSuccessfulUpdate));
log.info("Analytics database: {}", settings.isAnalyticsDatabase());
log.info("Skipping table types: {}", skipTypes);

progress.startingProcess(
"Analytics table update process{}", (params.isLatestUpdate() ? " (latest partition)" : ""));

if (!params.isSkipResourceTables() && !params.isLatestUpdate()) {
generateResourceTablesInternal(progress);

if (settings.isAnalyticsDatabaseConfigured()) {
if (settings.isAnalyticsDatabase()) {
log.info("Replicating resource tables in analytics database");
resourceTableService.replicateAnalyticsResourceTables();
}
}

Set<AnalyticsTableType> skipTypes = emptyIfNull(params.getSkipTableTypes());
if (!params.isLatestUpdate() && settings.isAnalyticsDatabase()) {
if (!skipTypes.containsAll(Set.of(EVENT, ENROLLMENT, TRACKED_ENTITY_INSTANCE))) {
log.info("Replicating tracked entity attribute value table");
tableReplicationService.replicateTrackedEntityAttributeValue();
}
}
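The new guard above only replicates the tracked entity attribute value table when at least one tracker-related table type will actually be built in the analytics database. Because containsAll is used, replication is skipped only when all three types are skipped; a small worked example of the condition (assuming the same static imports as this class):

// EVENT and ENROLLMENT are skipped, TRACKED_ENTITY_INSTANCE is not:
Set<AnalyticsTableType> skipped = Set.of(EVENT, ENROLLMENT);
boolean replicate = !skipped.containsAll(Set.of(EVENT, ENROLLMENT, TRACKED_ENTITY_INSTANCE));
// replicate == true, because tracked entity tables will still be generated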

for (AnalyticsTableService service : analyticsTableServices) {
AnalyticsTableType tableType = service.getAnalyticsTableType();

if (!skipTypes.contains(tableType)) {
service.create(params, progress);
}
@@ -132,7 +140,7 @@ private void updateLastSuccessfulSystemSettings(AnalyticsTableUpdateParams param

@Override
public void generateResourceTables(JobProgress progress) {
Clock clock = new Clock().startClock();
final Clock clock = new Clock().startClock();

progress.startingProcess("Generating resource tables");

@@ -150,6 +158,11 @@ public void generateResourceTables(JobProgress progress) {
// Supportive methods
// -------------------------------------------------------------------------

/**
* Generates resource tables.
*
* @param progress the {@link JobProgress}.
*/
private void generateResourceTablesInternal(JobProgress progress) {
resourceTableService.dropAllSqlViews(progress);

@@ -162,4 +175,15 @@ private void generateResourceTablesInternal(JobProgress progress) {

settingsService.put("keyLastSuccessfulResourceTablesUpdate", new Date());
}

/**
* Returns the available analytics table types.
*
* @return a set of {@link AnalyticsTableType}.
*/
private Set<AnalyticsTableType> getAvailableTableTypes() {
return analyticsTableServices.stream()
.map(AnalyticsTableService::getAnalyticsTableType)
.collect(Collectors.toSet());
}
}