
package com.smartdukaan.cron.scheduled;

import com.spice.profitmandi.dao.entity.solr.SolrUpdateFailure;
import com.spice.profitmandi.dao.repository.solr.SolrUpdateFailureRepository;
import com.spice.profitmandi.dao.service.solr.FofoSolr;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;

import java.time.LocalDateTime;
import java.util.List;

/**
 * Scheduled job to retry failed Solr updates.
 * Runs every 5 minutes to retry pending failures.
 * Max 3 auto-retries before marking as FAILED for manual intervention.
 * Only active in production environment.
 */
@Component
public class SolrRetryJob {

    private static final Logger log = LogManager.getLogger(SolrRetryJob.class);
    private static final int MAX_AUTO_RETRIES = 3;

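    // Resolved from the "prod" property (default false), so the job is a no-op outside production.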
    @Value("${prod:false}")
    private boolean isProd;

    @Autowired
    private SolrUpdateFailureRepository failureRepository;

    @Autowired
    private FofoSolr fofoSolr;

    /**
     * Retry pending Solr updates every 5 minutes.
     * Only retries records with retry count < MAX_AUTO_RETRIES.
     * Only runs in production environment.
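     * Uses Spring's six-field cron format (second minute hour day-of-month month day-of-week),
     * so the job fires at second 0 of every fifth minute. Solr failures are caught per record,
     * so a single bad catalog does not roll back the rest of the batch.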
     */
    @Scheduled(cron = "0 */5 * * * *")
    @Transactional(rollbackFor = Throwable.class)
    public void retryPendingUpdates() {
        if (!isProd) {
            return;
        }

        log.debug("Starting Solr retry job");

        List<SolrUpdateFailure> failures = failureRepository.selectAllPending();

        if (failures.isEmpty()) {
            log.debug("No pending Solr updates to retry");
            return;
        }

        log.info("Found {} pending Solr update failures to retry", failures.size());

        int success = 0;
        int failed = 0;
        int skipped = 0;

        for (SolrUpdateFailure failure : failures) {
            // Skip if max retries exceeded
            if (failure.getRetryCount() >= MAX_AUTO_RETRIES) {
                failure.setStatus("FAILED");
                failureRepository.update(failure);
                skipped++;
                log.warn("Max retries exceeded for catalogId={}, marking as FAILED", failure.getCatalogId());
                continue;
            }

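            // Re-attempt the Solr update; on success the stale failure record is removed.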
            try {
                fofoSolr.updateSingleCatalog(failure.getCatalogId());
                failureRepository.deleteById(failure.getId());
                success++;
                log.info("Retry successful for catalogId={}", failure.getCatalogId());
            } catch (Exception e) {
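                // Record this attempt before deciding whether to keep retrying or give up.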
                failure.setRetryCount(failure.getRetryCount() + 1);
                failure.setLastRetryAt(LocalDateTime.now());
                failure.setErrorMessage(truncateErrorMessage(e.getMessage()));

                if (failure.getRetryCount() >= MAX_AUTO_RETRIES) {
                    failure.setStatus("FAILED");
                    log.warn("Retry #{} failed for catalogId={}, marking as FAILED for manual intervention",
                            failure.getRetryCount(), failure.getCatalogId());
                } else {
                    log.warn("Retry #{} failed for catalogId={}, will retry again",
                            failure.getRetryCount(), failure.getCatalogId());
                }

                failureRepository.update(failure);
                failed++;
            }
        }

        log.info("Solr retry job completed: success={}, failed={}, skipped={}", success, failed, skipped);
    }

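    /**
     * Bounds the stored error message to 2,000 characters and substitutes a
     * placeholder when the exception carries no message, so the update never persists null.
     */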
    private String truncateErrorMessage(String message) {
        if (message == null) {
            return "Unknown error";
        }
        return message.length() > 2000 ? message.substring(0, 2000) : message;
    }
}