@org.quartz.DisallowConcurrentExecution - Java examples

Here are examples of the Java API annotation @org.quartz.DisallowConcurrentExecution, taken from open source projects.

132 Examples
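
Before the project examples, here is a minimal, self-contained sketch (not taken from any project below) of what the annotation does: Quartz refuses to run two instances of the annotated job concurrently for the same JobDetail, even when trigger firings overlap. The job key and the two-second sleep are illustrative only.

import org.quartz.*;
import org.quartz.impl.StdSchedulerFactory;

@DisallowConcurrentExecution
public class SlowJob implements Job {

    @Override
    public void execute(JobExecutionContext context) throws JobExecutionException {
        try {
            // Simulate slow work; overlapping fires of this JobDetail are
            // serialized by Quartz because of the annotation.
            Thread.sleep(2000);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            throw new JobExecutionException(e);
        }
    }

    public static void main(String[] args) throws SchedulerException {
        Scheduler scheduler = StdSchedulerFactory.getDefaultScheduler();
        JobDetail job = JobBuilder.newJob(SlowJob.class)
                .withIdentity("slowJob", "examples")
                .build();
        Trigger trigger = TriggerBuilder.newTrigger()
                .withIdentity("everySecond", "examples")
                .startNow()
                .withSchedule(SimpleScheduleBuilder.simpleSchedule()
                        .withIntervalInSeconds(1)    // fires faster than the job finishes
                        .repeatForever())
                .build();
        scheduler.start();
        scheduler.scheduleJob(job, trigger);
    }
}

Note that the guarantee is per JobDetail (per job key), not per class: two JobDetails of the same annotated class can still run at the same time.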

Example: PollableTaskCleanupJob.java (Apache License 2.0, Author: box)
/**
 * @author aloison
 */
@Profile("!disablescheduling")
@Configuration
@Component
@DisallowConcurrentExecution
public class PollableTaskCleanupJob implements Job {

    static final String FINISH_ZOMBIE_TASKS_WITH_ERROR = "Finish zombie tasks with error";

    /**
     * in milliseconds
     */
    static final int REPEAT_INTERVAL = 30000;

    /**
     * logger
     */
    static Logger logger = LoggerFactory.getLogger(PollableTaskCleanupJob.class);

    @Autowired
    PollableTaskCleanupService pollableTaskCleanupService;

    /**
     * @see PollableTaskCleanupService#finishZombieTasksWithError()
     * It is triggered every 30 seconds (= 30,000 milliseconds).
     */
    @Override
    public void execute(JobExecutionContext context) throws JobExecutionException {
        logger.debug(FINISH_ZOMBIE_TASKS_WITH_ERROR);
        pollableTaskCleanupService.finishZombieTasksWithError();
    }

    @Bean(name = "jobDetailPollableTaskCleanup")
    public JobDetailFactoryBean jobDetailPollableTaskCleanup() {
        JobDetailFactoryBean jobDetailFactory = new JobDetailFactoryBean();
        jobDetailFactory.setJobClass(PollableTaskCleanupJob.class);
        jobDetailFactory.setDescription(FINISH_ZOMBIE_TASKS_WITH_ERROR);
        jobDetailFactory.setDurability(true);
        return jobDetailFactory;
    }

    @Bean
    public SimpleTriggerFactoryBean triggerPollableTaskCleanup(@Qualifier("jobDetailPollableTaskCleanup") JobDetail job) {
        SimpleTriggerFactoryBean trigger = new SimpleTriggerFactoryBean();
        trigger.setJobDetail(job);
        trigger.setRepeatInterval(REPEAT_INTERVAL);
        trigger.setRepeatCount(SimpleTrigger.REPEAT_INDEFINITELY);
        return trigger;
    }
}
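
The two factory beans above only define the job and its trigger. In a typical Spring setup, something like the following hands them to the scheduler; this is a hedged sketch, not part of the original project, and the SchedulerConfiguration class name is an assumption.

import org.quartz.JobDetail;
import org.quartz.Trigger;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.quartz.SchedulerFactoryBean;

@Configuration
public class SchedulerConfiguration {

    // Hypothetical wiring: register the JobDetail and Trigger beans defined
    // above with the Quartz scheduler that Spring creates and manages.
    @Bean
    public SchedulerFactoryBean schedulerFactoryBean(JobDetail jobDetailPollableTaskCleanup,
                                                     Trigger triggerPollableTaskCleanup) {
        SchedulerFactoryBean factory = new SchedulerFactoryBean();
        factory.setJobDetails(jobDetailPollableTaskCleanup);
        factory.setTriggers(triggerPollableTaskCleanup);
        return factory;
    }
}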

Example: TaskDisallowConcurrentExecutor.java (MIT License, Author: Fatezhang)
/**
 * @Author [email protected]
 * @Description Executes the job method; concurrent execution is not allowed
 */
@DisallowConcurrentExecution
public class TaskDisallowConcurrentExecutor implements Job {

    @Override
    public void execute(JobExecutionContext context) throws JobExecutionException {
        JobInfo jobInfo = (JobInfo) context.getMergedJobDataMap().get("JobInfo");
        InvokeTool invokeJobUtils = SpringContextHolder.getBean(InvokeTool.class);
        invokeJobUtils.invokMethod(jobInfo);
    }
}
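
TaskDisallowConcurrentExecutor expects a JobInfo object under the "JobInfo" key of the merged job data map. A hedged sketch of how a caller might schedule it follows; the job key, cron expression, the jobInfo variable, and the pre-existing org.quartz.Scheduler instance named scheduler are assumptions, not taken from the original project.

// Hypothetical scheduling code: put the JobInfo into the JobDataMap so that
// context.getMergedJobDataMap().get("JobInfo") can retrieve it in execute().
JobDataMap dataMap = new JobDataMap();
dataMap.put("JobInfo", jobInfo);

JobDetail jobDetail = JobBuilder.newJob(TaskDisallowConcurrentExecutor.class)
        .withIdentity("task-1", "tasks")
        .usingJobData(dataMap)
        .build();

CronTrigger trigger = TriggerBuilder.newTrigger()
        .withIdentity("trigger-1", "tasks")
        .withSchedule(CronScheduleBuilder.cronSchedule("0 0/5 * * * ?"))  // every 5 minutes
        .build();

scheduler.scheduleJob(jobDetail, trigger);

getMergedJobDataMap() merges the JobDetail's map with the trigger's map (trigger entries take precedence), so the JobInfo could equally be attached to the trigger.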

Example: NotifyIdentityTaskExecutor.java (MIT License, Author: bcvsolutions)
/**
 * Test task executor implementation.
 * Publish notify event on identities.
 *
 * @author Radek Tomiška
 */
@Component
@DisallowConcurrentExecution
@Description("Publish notify event on identities")
@ConditionalOnProperty(prefix = "idm.pub.app", name = "stage", havingValue = "development")
public class NotifyIdentityTaskExecutor extends AbstractSchedulableStatefulExecutor<IdmIdentityDto> {

    private static final String PARAMETER_TEXT = "text";

    // 
    @Autowired
    private IdmIdentityService identityService;

    @Autowired
    private EntityEventManager entityEventManager;

    // 
    private String text;

    @Override
    public void init(Map<String, Object> properties) {
        super.init(properties);
        // 
        text = getParameterConverter().toString(properties, PARAMETER_TEXT);
    }

    @Override
    public Page<IdmIdentityDto> getItemsToProcess(Pageable pageable) {
        IdmIdentityFilter filter = new IdmIdentityFilter();
        filter.setText(text);
        // 
        return identityService.find(filter, pageable);
    }

    @Override
    public Optional<OperationResult> processItem(IdmIdentityDto dto) {
        try {
            entityEventManager.changedEntity(dto);
            return Optional.of(new OperationResult.Builder(OperationState.EXECUTED).build());
        } catch (Exception ex) {
            throw new CoreException(ex);
        }
    }

    @Override
    public List<String> getPropertyNames() {
        List<String> parameters = super.getPropertyNames();
        parameters.add(PARAMETER_TEXT);
        return parameters;
    }

    @Override
    public Map<String, Object> getProperties() {
        Map<String, Object> properties = super.getProperties();
        properties.put(PARAMETER_TEXT, text);
        return properties;
    }
}

Example: BranchNotificationJob.java (Apache License 2.0, Author: box)
/**
 * Job that sends notifications for a branch.
 */
@Component
@DisallowConcurrentExecution
public class BranchNotificationJob extends SchedulableJob {

    /**
     * logger
     */
    static Logger logger = LoggerFactory.getLogger(BranchNotificationJob.class);

    static final String BRANCH_ID = "branchId";

    @Autowired
    BranchNotificationService branchNotificationService;

    @Override
    protected String getDescription() {
        return "Sends notifications for a branch";
    }

    @Override
    public void execute(JobExecutionContext context) throws JobExecutionException {
        Long branchId = context.getMergedJobDataMap().getLong(BRANCH_ID);
        logger.debug("execute for branchId: {}", branchId);
        branchNotificationService.sendNotificationsForBranch(branchId);
    }

    public void schedule(Long branchId) {
        JobDataMap jobDataMap = new JobDataMap();
        jobDataMap.put(BRANCH_ID, branchId.toString());
        schedule(jobDataMap, BRANCH_ID);
    }
}
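
SchedulableJob.schedule(JobDataMap, String) is project-specific. With plain Quartz, a comparable one-off execution with per-run data could be requested as below; this is a sketch that assumes the job is already stored as a durable JobDetail under the given (hypothetical) key and that scheduler is an existing org.quartz.Scheduler.

// Fire the already-registered durable job once, passing the branch id for this run.
JobDataMap dataMap = new JobDataMap();
dataMap.put("branchId", branchId.toString());
scheduler.triggerJob(JobKey.jobKey("branchNotificationJob"), dataMap);  // hypothetical job key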

Example: ClearTempDataJobImpl.java (Apache License 2.0, Author: billchen198318)
@DisallowConcurrentExecution
public class ClearTempDataJobImpl extends BaseJob implements Job {

    protected static Logger log = Logger.getLogger(ClearTempDataJobImpl.class);

    public ClearTempDataJobImpl() {
        super();
    }

    @Override
    protected void executeInternal(JobExecutionContext context) throws JobExecutionException {
        if (ContextLoader.getCurrentWebApplicationContext() == null) {
            log.warn("ApplicationContext not initialized, AppContext.getApplicationContext() == null");
            return;
        }
        log.info("begin....");
        if (this.checkCurrentlyExecutingJobs(context, this)) {
            log.warn("Same scheduled job is currently running...");
            return;
        }
        try {
            /**
             * document reference:
             * com.netsteadfast.greenstep.support.CleanTempUploadForContextInitAndDestroy.java
             */
            this.loginForBackgroundProgram();
            List<SysVO> systems = ApplicationSiteUtils.getSystems();
            if (systems == null || systems.size() < 1) {
                return;
            }
            for (SysVO sys : systems) {
                UploadSupportUtils.cleanTempUpload(sys.getSysId());
            }
            /**
             * document reference:
             * com.netsteadfast.greenstep.bsc.support.CleanJasperReportTempDataForContextInitAndDestroy.java
             */
            NamedParameterJdbcTemplate namedParameterJdbcTemplate = (NamedParameterJdbcTemplate) AppContext.getBean("namedParameterJdbcTemplate");
            Map<String, Object> paramMap = new HashMap<String, Object>();
            namedParameterJdbcTemplate.update("delete from bb_swot_report_mst", paramMap);
            namedParameterJdbcTemplate.update("delete from bb_swot_report_dtl", paramMap);
        } catch (ServiceException e) {
            e.printStackTrace();
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            try {
                this.logoutForBackgroundProgram();
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
        log.info("end....");
    }
}

Example: HrEndContractProcess.java (MIT License, Author: bcvsolutions)
/**
 * HR process - end of identity's contract process. The process is started
 * for contracts that are not valid (based on validFrom and validTill).
 *
 * "hrEndContract" can be configured as process workflow.
 *
 * @author Jan Helbich
 * @author Radek Tomiška
 * @since 7.5.1
 */
@Service
@Description("HR process - end of contract")
@DisallowConcurrentExecution
public class HrEndContractProcess extends AbstractHrProcess {

    @Autowired
    private IdmIdentityContractService identityContractService;

    @Autowired
    private IdentityContractEndProcessor identityContractEndProcessor;

    public HrEndContractProcess() {
    }

    public HrEndContractProcess(boolean skipAutomaticRoleRecalculation) {
        super(skipAutomaticRoleRecalculation);
    }

    @Override
    public boolean continueOnException() {
        return true;
    }

    @Override
    public boolean requireNewTransaction() {
        return true;
    }

    /**
     * {@inheritDoc}
     *
     * Find all identity contracts that are no longer valid.
     */
    @Override
    public Page<IdmIdentityContractDto> getItemsToProcess(Pageable pageable) {
        IdmIdentityContractFilter filter = new IdmIdentityContractFilter();
        filter.setValid(Boolean.FALSE);
        // 
        return identityContractService.find(filter, pageable);
    }

    @Override
    public Optional<OperationResult> processItem(IdmIdentityContractDto dto) {
        if (!StringUtils.isEmpty(getWorkflowName())) {
            // wf is configured - execute wf instance
            return super.processItem(dto);
        }
        return Optional.of(identityContractEndProcessor.process(dto, isSkipAutomaticRoleRecalculation()));
    }
}

Example: CleanUpJob.java (Apache License 2.0, Author: cloudfoundry-incubator)
@DisallowConcurrentExecution
public class CleanUpJob implements Job {

    public static final Marker LOG_MARKER = MarkerFactory.getMarker("clean-up-job");

    private static final Logger LOGGER = LoggerFactory.getLogger(CleanUpJob.class);

    @Inject
    ApplicationConfiguration configuration;

    @Inject
    List<Cleaner> cleaners;

    private final SafeExecutor safeExecutor = new SafeExecutor(CleanUpJob::log);

    @Override
    public void execute(JobExecutionContext context) {
        LOGGER.info(LOG_MARKER, format(Messages.CLEAN_UP_JOB_STARTED_BY_APPLICATION_INSTANCE_0_AT_1, configuration.getApplicationInstanceIndex(), Instant.now()));
        Date expirationTime = computeExpirationTime();
        LOGGER.info(LOG_MARKER, format(Messages.WILL_CLEAN_UP_DATA_STORED_BEFORE_0, expirationTime));
        LOGGER.info(LOG_MARKER, format(Messages.REGISTERED_CLEANERS_IN_CLEAN_UP_JOB_0, cleaners));
        for (Cleaner cleaner : cleaners) {
            safeExecutor.execute(() -> cleaner.execute(expirationTime));
        }
        LOGGER.info(LOG_MARKER, format(Messages.CLEAN_UP_JOB_FINISHED_AT_0, Instant.now()));
    }

    private Date computeExpirationTime() {
        long maxTtlForOldData = configuration.getMaxTtlForOldData();
        return Date.from(Instant.now().minusSeconds(maxTtlForOldData));
    }

    private static void log(Exception e) {
        LOGGER.error(LOG_MARKER, format(Messages.ERROR_DURING_CLEAN_UP_0, e.getMessage()), e);
    }
}

Example: DeleteProvisioningArchiveTaskExecutor.java (MIT License, Author: bcvsolutions)
/**
 * Delete archived provisioning operations.
 *
 * @author Radek Tomiška
 * @since 9.6.3
 */
@Service(DeleteProvisioningArchiveTaskExecutor.TASK_NAME)
@DisallowConcurrentExecution
@Description("Delete archived provisioning operations.")
public class DeleteProvisioningArchiveTaskExecutor extends AbstractSchedulableStatefulExecutor<SysProvisioningArchiveDto> {

    private static final org.slf4j.Logger LOG = org.slf4j.LoggerFactory.getLogger(DeleteProvisioningArchiveTaskExecutor.class);

    public static final String TASK_NAME = "acc-delete-provisioning-archive-long-running-task";

    // archive older than
    public static final String PARAMETER_NUMBER_OF_DAYS = "numberOfDays";

    // archive state
    public static final String PARAMETER_OPERATION_STATE = "operationState";

    // system
    public static final String PARAMETER_SYSTEM = "system";

    // empty provisioning
    public static final String PARAMETER_EMPTY_PROVISIONING = SysProvisioningOperationFilter.PARAMETER_EMPTY_PROVISIONING;

    public static final int DEFAULT_NUMBER_OF_DAYS = 90;

    public static final OperationState DEFAULT_OPERATION_STATE = OperationState.EXECUTED;

    // 
    @Autowired
    private SysProvisioningArchiveService service;

    // 
    // optional
    private int numberOfDays = 0;

    // optional
    private OperationState operationState;

    private UUID systemId = null;

    private Boolean emptyProvisioning = null;

    @Override
    public String getName() {
        return TASK_NAME;
    }

    @Override
    public void init(Map<String, Object> properties) {
        super.init(properties);
        // 
        Long givenNumberOfDays = getParameterConverter().toLong(properties, PARAMETER_NUMBER_OF_DAYS);
        if (givenNumberOfDays != null) {
            numberOfDays = Math.toIntExact(givenNumberOfDays);
        } else {
            numberOfDays = 0;
        }
        operationState = getParameterConverter().toEnum(properties, PARAMETER_OPERATION_STATE, OperationState.class);
        systemId = getParameterConverter().toEntityUuid(properties, PARAMETER_SYSTEM, SysSystemDto.class);
        emptyProvisioning = getParameterConverter().toBoolean(properties, PARAMETER_EMPTY_PROVISIONING);
    }

    @Override
    protected boolean start() {
        LOG.warn("Start deleting empty [{}] archived provisioning operations older than [{}] days in state [{}] with system [{}].", emptyProvisioning, numberOfDays, operationState, systemId);
        // 
        return super.start();
    }

    @Override
    protected Boolean end(Boolean result, Exception ex) {
        result = super.end(result, ex);
        LOG.warn("End deleting empty [{}] archived provisioning operations older than [{}] days in state [{}] with system [{}]. Processed operations [{}].", emptyProvisioning, numberOfDays, operationState, systemId, counter);
        return result;
    }

    @Override
    public Page<SysProvisioningArchiveDto> getItemsToProcess(Pageable pageable) {
        SysProvisioningOperationFilter filter = new SysProvisioningOperationFilter();
        filter.setResultState(operationState);
        filter.setSystemId(systemId);
        filter.setEmptyProvisioning(emptyProvisioning);
        if (numberOfDays > 0) {
            filter.setTill(LocalDate.now().atStartOfDay(ZoneId.systemDefault()).minusDays(numberOfDays));
        }
        // records are deleted as they are processed, so we always request the first page
        return service.find(filter, PageRequest.of(0, pageable.getPageSize()));
    }

    @Override
    public Optional<OperationResult> processItem(SysProvisioningArchiveDto dto) {
        service.delete(dto);
        // 
        return Optional.of(new OperationResult.Builder(OperationState.EXECUTED).build());
    }

    @Override
    public List<String> getPropertyNames() {
        List<String> parameters = super.getPropertyNames();
        parameters.add(PARAMETER_NUMBER_OF_DAYS);
        parameters.add(PARAMETER_OPERATION_STATE);
        parameters.add(PARAMETER_SYSTEM);
        parameters.add(PARAMETER_EMPTY_PROVISIONING);
        // 
        return parameters;
    }

    @Override
    public Map<String, Object> getProperties() {
        Map<String, Object> properties = super.getProperties();
        properties.put(PARAMETER_NUMBER_OF_DAYS, numberOfDays);
        properties.put(PARAMETER_OPERATION_STATE, operationState);
        properties.put(PARAMETER_SYSTEM, systemId);
        properties.put(PARAMETER_EMPTY_PROVISIONING, emptyProvisioning);
        // 
        return properties;
    }

    @Override
    public List<IdmFormAttributeDto> getFormAttributes() {
        IdmFormAttributeDto numberOfDaysAttribute = new IdmFormAttributeDto(PARAMETER_NUMBER_OF_DAYS, PARAMETER_NUMBER_OF_DAYS, PersistentType.LONG);
        numberOfDaysAttribute.setDefaultValue(String.valueOf(DEFAULT_NUMBER_OF_DAYS));
        // 
        IdmFormAttributeDto operationStateAttribute = new IdmFormAttributeDto(PARAMETER_OPERATION_STATE, PARAMETER_OPERATION_STATE, PersistentType.ENUMERATION);
        operationStateAttribute.setDefaultValue(DEFAULT_OPERATION_STATE.name());
        operationStateAttribute.setFaceType(BaseFaceType.OPERATION_STATE_ENUM);
        // 
        IdmFormAttributeDto system = new IdmFormAttributeDto(PARAMETER_SYSTEM, "System", PersistentType.UUID);
        system.setFaceType(AccFaceType.SYSTEM_SELECT);
        IdmFormAttributeDto emptyProvisioningAttribute = new IdmFormAttributeDto(PARAMETER_EMPTY_PROVISIONING, PARAMETER_EMPTY_PROVISIONING, PersistentType.BOOLEAN, BaseFaceType.BOOLEAN_SELECT);
        emptyProvisioningAttribute.setDefaultValue(Boolean.TRUE.toString());
        // 
        return Lists.newArrayList(numberOfDaysAttribute, operationStateAttribute, system, emptyProvisioningAttribute);
    }

    @Override
    public boolean supportsDryRun() {
        // TODO: get context (or LRT) in getItemsToProcess ...
        return false;
    }

    @Override
    public boolean requireNewTransaction() {
        return true;
    }

    @Override
    public boolean supportsQueue() {
        return false;
    }
}
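
Note the comment in getItemsToProcess above: because every processed item is deleted, the executor always requests the first page instead of advancing the page number; advancing would skip records as the result set shrinks beneath it. In isolation, the pattern looks like this (a generic sketch with a hypothetical Spring Data repository and Record type, not IdM code):

// Delete-while-paging: always re-read page 0, because deleting items shifts
// the remaining records forward; advancing the page index would skip records.
Page<Record> page;
do {
    page = repository.findAll(PageRequest.of(0, 100));  // always the first page
    page.forEach(repository::delete);
} while (page.hasNext());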

Example: FileUploadCleanupJob.java (Apache License 2.0, Author: FINRAOS)
/**
 * The file upload cleanup job.
 */
@Component(FileUploadCleanupJob.JOB_NAME)
@DisallowConcurrentExecution
public class FileUploadCleanupJob extends AbstractSystemJob {

    private static final Logger LOGGER = LoggerFactory.getLogger(FileUploadCleanupJob.class);

    public static final String JOB_NAME = "fileUploadCleanup";

    @Autowired
    private FileUploadCleanupService fileUploadCleanupService;

    @Autowired
    private ParameterHelper parameterHelper;

    @Override
    protected void executeInternal(JobExecutionContext context) throws JobExecutionException {
        // Log that the system job is started.
        LOGGER.info("Started system job. systemJobName=\"{}\"", JOB_NAME);
        // Get the parameter values.
        int thresholdMinutes = parameterHelper.getParameterValueAsInteger(parameters, ConfigurationValue.FILE_UPLOAD_CLEANUP_JOB_THRESHOLD_MINUTES);
        // Log the parameter values.
        LOGGER.info("systemJobName={} {}={}", JOB_NAME, ConfigurationValue.FILE_UPLOAD_CLEANUP_JOB_THRESHOLD_MINUTES, thresholdMinutes);
        // Mark as DELETED any dangling business object data records with storage files in S3_MANAGED_LOADING_DOCK storage.
        try {
            List<BusinessObjectDataKey> businessObjectDataKeys = fileUploadCleanupService.deleteBusinessObjectData(StorageEntity.MANAGED_LOADING_DOCK_STORAGE, thresholdMinutes);
            LOGGER.info("Deleted {} instances of loading dock business object data. systemJobName=\"{}\" storageName=\"{}\"", CollectionUtils.size(businessObjectDataKeys), JOB_NAME, StorageEntity.MANAGED_LOADING_DOCK_STORAGE);
        } catch (Exception e) {
            // Log the exception.
            LOGGER.error("Failed to delete loading dock business object data. systemJobName=\"{}\"", JOB_NAME, e);
        }
        // Log that the system job is ended.
        LOGGER.info("Completed system job. systemJobName=\"{}\"", JOB_NAME);
    }

    @Override
    public void validateParameters(List<Parameter> parameters) {
        // This system job accepts only one optional parameter with an integer value.
        if (!CollectionUtils.isEmpty(parameters)) {
            Assert.isTrue(parameters.size() == 1, String.format("Too many parameters are specified for \"%s\" system job.", JOB_NAME));
            Assert.isTrue(parameters.get(0).getName().equalsIgnoreCase(ConfigurationValue.FILE_UPLOAD_CLEANUP_JOB_THRESHOLD_MINUTES.getKey()), String.format("Parameter \"%s\" is not supported by \"%s\" system job.", parameters.get(0).getName(), FileUploadCleanupJob.JOB_NAME));
            parameterHelper.getParameterValueAsInteger(parameters.get(0));
        }
    }

    @Override
    public JobDataMap getJobDataMap() {
        return getJobDataMap(ConfigurationValue.FILE_UPLOAD_CLEANUP_JOB_THRESHOLD_MINUTES);
    }

    @Override
    public String getCronExpression() {
        return configurationHelper.getProperty(ConfigurationValue.FILE_UPLOAD_CLEANUP_JOB_CRON_EXPRESSION);
    }
}

Example: ChangeConfidentialStorageKeyTaskExecutor.java (MIT License, Author: bcvsolutions)
/**
 * Task that changes all values in the confidential storage to a new key taken from a file or application properties.
 * This task must be started after the key has been changed.
 * TODO: change parameter oldKey to input type password (not required now, because the old key is sent as a parameter)
 *
 * @author Ondrej Kopr <[email protected]>
 */
@Service
@DisallowConcurrentExecution
@Description("Change all encrypted values in the confidential storage to the new key. This task must be started after the key has been changed!")
public class ChangeConfidentialStorageKeyTaskExecutor extends AbstractSchedulableTaskExecutor<Boolean> {

    private static final org.slf4j.Logger LOG = org.slf4j.LoggerFactory.getLogger(ChangeConfidentialStorageKeyTaskExecutor.class);

    public static String PARAMETER_OLD_CONFIDENTIAL_KEY = "oldCryptKey";

    private GuardedString oldCryptKey = null;

    private int PAGE_SIZE = 100;

    private int KEY_LENGTH = 16;

    @Autowired
    private IdmConfidentialStorageValueService confidentialStorageValueService;

    @Autowired
    private ConfidentialStorage confidentialStorage;

    @Override
    public void init(Map<String, Object> properties) {
        super.init(properties);
        // 
        String oldKeyInString = getParameterConverter().toString(properties, PARAMETER_OLD_CONFIDENTIAL_KEY);
        if (oldKeyInString == null || oldKeyInString.isEmpty()) {
            LOG.error("Old key cannot be null or empty.");
            throw new ResultCodeException(CoreResultCode.BAD_VALUE, "Old key cannot be null or empty.");
        }
        if (oldKeyInString.length() != KEY_LENGTH) {
            LOG.error("Length of old key has to be [{}] characters.", KEY_LENGTH);
            throw new ResultCodeException(CoreResultCode.BAD_VALUE, MessageFormat.format("Length of old key has to be [{0}] characters.", KEY_LENGTH));
        }
        this.oldCryptKey = new GuardedString(oldKeyInString);
    }

    @Override
    public List<String> getPropertyNames() {
        List<String> parameters = super.getPropertyNames();
        parameters.add(PARAMETER_OLD_CONFIDENTIAL_KEY);
        return parameters;
    }

    @Override
    public Boolean process() {
        int page = 0;
        boolean canContinue = true;
        counter = 0L;
        // 
        do {
            Page<IdmConfidentialStorageValueDto> values = confidentialStorageValueService.find(PageRequest.of(page, PAGE_SIZE, new Sort(Direction.ASC, AbstractEntity_.id.getName())));
            // 
            if (count == null) {
                count = values.getTotalElements();
            }
            // 
            for (Iterator<IdmConfidentialStorageValueDto> iterator = values.iterator(); iterator.hasNext() && canContinue; ) {
                IdmConfidentialStorageValueDto value = iterator.next();
                Assert.notNull(value, "Value is required.");
                Assert.notNull(value.getId(), "Value identifier is required.");
                // 
                try {
                    confidentialStorage.changeCryptKey(value, oldCryptKey);
                    counter++;
                    // 
                    this.logItemProcessed(value, new OperationResult.Builder(OperationState.EXECUTED).build());
                } catch (Exception ex) {
                    LOG.error("Error during change confidential storage key. For key [{}].", value.getKey(), ex);
                    this.logItemProcessed(value, new OperationResult.Builder(OperationState.EXCEPTION).setCause(ex).build());
                }
                // 
                canContinue &= this.updateState();
            }
            canContinue &= values.hasNext();
            ++page;
        // 
        } while (canContinue);
        return Boolean.TRUE;
    }
}

Example: WsIDETLJob.java (Apache License 2.0, Author: boubei-com)
/**
 * Extract-by-ID job: data is pulled through a WebService interface into the data tables of the parent BI system
 */
@DisallowConcurrentExecution
public class WsIDETLJob extends ByIDETLJob {

    protected String etlType() {
        return "wsID";
    }

    protected Long[] etlByID(Task task, Long startID) {
        Report report = new Report();
        report.setName(task.getName());
        report.setDatasource(task.getSourceDS());
        report.setScript(task.getSourceScript());
        report.setParam("[{'label':'maxID', 'type':'number'}]");
        Map<String, String> paramsMap = new HashMap<String, String>();
        paramsMap.put("param1", String.valueOf(startID));
        SQLExcutor ex = ReportQuery.excute(report, paramsMap, 1, 0);
        if (ex.count == 0) {
            return new Long[] { 0L, startID };
        }
        Long maxID = startID;
        StringBuffer data = new StringBuffer();
        data.append(EasyUtils.list2Str(ex.selectFields) + ",licenseowner").append("\n");
        for (Map<String, Object> row : ex.result) {
            Collection<Object> values = new ArrayList<Object>();
            for (String field : ex.selectFields) {
                Object value = row.get(field);
                if (field.equals("id")) {
                    maxID = Math.max(maxID, EasyUtils.obj2Long(value));
                    // the id is not needed
                    value = "";
                }
                values.add(DMUtil.preTreatVal(value));
            }
            values.add(InstallListener.licenseOwner());
            data.append(EasyUtils.list2Str(values)).append("\n");
        }
        MatrixUtil.remoteRecordBatch(task.getTargetScript(), data.toString());
        return new Long[] { (long) ex.count, maxID };
    }
}

Example: SysExpressionJobImpl.java (Apache License 2.0, Author: billchen198318)
/**
 * Note: this job's Quartz configuration must schedule it to run every minute
 */
@DisallowConcurrentExecution
public class SysExpressionJobImpl extends BaseJob implements Job {

    protected static Logger log = Logger.getLogger(SysExpressionJobImpl.class);

    private static final String _CONFIG = "SysExpressionJob.json";

    private static String _datas = " { } ";

    private static Map<String, Object> _configDataMap;

    static {
        try {
            InputStream is = SysExpressionJobImpl.class.getClassLoader().getResource(_CONFIG).openStream();
            _datas = IOUtils.toString(is, Constants.BASE_ENCODING);
            is.close();
            is = null;
            _configDataMap = loadDatas();
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            if (null == _configDataMap) {
                _configDataMap = new HashMap<String, Object>();
            }
        }
    }

    @SuppressWarnings("unchecked")
    public static Map<String, Object> loadDatas() {
        Map<String, Object> datas = null;
        try {
            datas = (Map<String, Object>) new ObjectMapper().readValue(_datas, LinkedHashMap.class);
        } catch (Exception e) {
            e.printStackTrace();
        }
        return datas;
    }

    @Override
    protected void executeInternal(JobExecutionContext context) throws JobExecutionException {
        if (ContextLoader.getCurrentWebApplicationContext() == null) {
            log.warn("ApplicationContext not initialized, AppContext.getApplicationContext() == null");
            return;
        }
        if (this.checkCurrentlyExecutingJobs(context, this)) {
            log.warn("Same scheduled job is currently running...");
            return;
        }
        try {
            this.loginForBackgroundProgram();
            /*
			List<ExpressionJobObj> jobObjList = this.getExpressionJobs();
			if (jobObjList == null || jobObjList.size() < 1) {
				return;
			}
			ExecutorService exprJobPool = Executors.newFixedThreadPool( SimpleUtils.getAvailableProcessors(jobObjList.size()) );
			for (ExpressionJobObj jobObj : jobObjList) {
				jobObj = exprJobPool.submit( new ExpressionJobExecuteCallable(jobObj) ).get();
			}
			exprJobPool.shutdown();
			*/
            SystemExpressionJobUtils.executeJobs();
        } catch (ServiceException e) {
            e.printStackTrace();
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            try {
                this.logoutForBackgroundProgram();
            } catch (Exception e) {
                e.printStackTrace();
            }
            this.finalProcess();
        }
    }

    private void finalProcess() {
        String finalProcessClassName = "";
        String methodName = "";
        if (null != _configDataMap && !StringUtils.isBlank(finalProcessClassName = (String) _configDataMap.get("finalProcessClass"))) {
            methodName = StringUtils.defaultString((String) _configDataMap.get("method"));
            try {
                Class<?> finalProcessClass = Class.forName(finalProcessClassName);
                Method[] methods = finalProcessClass.getMethods();
                for (Method method : methods) {
                    if (method.getName().equals(methodName) && Modifier.isStatic(method.getModifiers())) {
                        try {
                            method.invoke(finalProcessClass);
                        } catch (IllegalAccessException e) {
                            e.printStackTrace();
                        } catch (IllegalArgumentException e) {
                            e.printStackTrace();
                        } catch (InvocationTargetException e) {
                            e.printStackTrace();
                        }
                    }
                }
            } catch (ClassNotFoundException e) {
                e.printStackTrace();
            }
        }
    }
    /*
	private boolean isRunTime(SysExprJobVO exprJob, String dayOfWeek, String hour, String minute) {
		
		// 查 DAY_OF_WEEK
		if (!ExpressionJobConstants.DATEOFWEEK_HOUR_MINUTE_ALL.equals(exprJob.getRunDayOfWeek()) 
				&& !dayOfWeek.equals(exprJob.getRunDayOfWeek())) {
			return false;
		}
		
		// 查 HOUR
		if (!ExpressionJobConstants.DATEOFWEEK_HOUR_MINUTE_ALL.equals(exprJob.getRunHour()) 
				&& !hour.equals(exprJob.getRunHour())) {
			return false;
		}	
		
		// 查MINUTE
		if (!ExpressionJobConstants.DATEOFWEEK_HOUR_MINUTE_ALL.equals(exprJob.getRunMinute()) 
				&& !minute.equals(exprJob.getRunMinute())) {
			return false;
		}
		
		return true;
	}
	
	private List<ExpressionJobObj> getExpressionJobs() throws ServiceException, Exception {
		int year = Integer.parseInt(SimpleUtils.getStrYMD(SimpleUtils.IS_YEAR));
		int month = Integer.parseInt(SimpleUtils.getStrYMD(SimpleUtils.IS_MONTH));
		String dayOfWeek = String.valueOf( SimpleUtils.getDayOfWeek(year, month) );
		String hour = String.valueOf( LocalDateTime.now().getHourOfDay() );
		String minute = String.valueOf( LocalDateTime.now().getMinuteOfHour() );		
		List<ExpressionJobObj> jobObjList = new ArrayList<ExpressionJobObj>();
		@SuppressWarnings("unchecked")
		ISysExprJobService<SysExprJobVO, TbSysExprJob, String> sysExprJobService = 
				(ISysExprJobService<SysExprJobVO, TbSysExprJob, String>) AppContext.getBean("core.service.SysExprJobService");
		Map<String, Object> paramMap = new HashMap<String, Object>();
		paramMap.put("system", Constants.getSystem());
		paramMap.put("active", YesNo.YES);
		List<SysExprJobVO> exprJobList = sysExprJobService.findListVOByParams(paramMap);
		if (null == exprJobList || exprJobList.size() < 1) {
			return jobObjList;
		}
		@SuppressWarnings("unchecked")
		ISysExpressionService<SysExpressionVO, TbSysExpression, String> sysExpressionService =
				(ISysExpressionService<SysExpressionVO, TbSysExpression, String>) AppContext.getBean("core.service.SysExpressionService");
		for (SysExprJobVO exprJob : exprJobList) {
			if (ExpressionJobConstants.RUNSTATUS_PROCESS_NOW.equals(exprJob.getRunStatus())) {
				log.warn( "Expression Job is process now: " + exprJob.getExprId() + " - " + exprJob.getName() );				
				continue;
			}
			if (!this.isRunTime(exprJob, dayOfWeek, hour, minute)) {
				continue;
			}
			ExpressionJobObj jobObj = new ExpressionJobObj();
			jobObj.setSysExprJob(exprJob);
			jobObj.setSysExprJobLog( new SysExprJobLogVO() );
			SysExpressionVO expr = new SysExpressionVO();
			expr.setExprId(exprJob.getExprId());
			DefaultResult<SysExpressionVO> exprResult = sysExpressionService.findByUK(expr);
			if (exprResult.getValue() == null) {
				log.error( "Expression Id: " + exprJob.getExprId() + " , data not found.");				
				log.error( exprResult.getSystemMessage().getValue() );
				continue;
			}
			expr = exprResult.getValue();
			jobObj.setSysExpression(expr);
			jobObjList.add(jobObj);			
		}
		return jobObjList;
	}
	*/
}

Example: ExecuteScriptTaskExecutor.java (MIT License, Author: bcvsolutions)
/**
 * Long running task that executes a script by its code.
 *
 * @author Ondrej Kopr <[email protected]>
 */
@Service
@PersistJobDataAfterExecution
@DisallowConcurrentExecution
@Description("Long running task that executes a script by its code.")
public class ExecuteScriptTaskExecutor extends AbstractSchedulableTaskExecutor<Boolean> {

    private static final org.slf4j.Logger LOG = org.slf4j.LoggerFactory.getLogger(ExecuteScriptTaskExecutor.class);

    private static String PARAMETER_SCRIPT_CODE = "scriptCode";

    private String scriptCode;

    @Autowired
    private IdmScriptService scriptService;

    @Autowired
    private DefaultSystemScriptEvaluator scriptEvaluator;

    @Override
    public void init(Map<String, Object> properties) {
        super.init(properties);
        // 
        this.scriptCode = getParameterConverter().toString(properties, PARAMETER_SCRIPT_CODE);
        // 
        getScriptByCode(this.scriptCode);
    }

    @Override
    public Boolean process() {
        LOG.info("Start script with code: [{}]", scriptCode);
        scriptEvaluator.evaluate(scriptEvaluator.newBuilder().setScriptCode(scriptCode).addParameter("scriptEvaluator", scriptEvaluator).addParameter("task", this).build());
        return Boolean.TRUE;
    }

    private IdmScriptDto getScriptByCode(String code) {
        IdmScriptDto script = scriptService.getByCode(code);
        if (script == null) {
            LOG.error("Script with code: [{}], not found.", code);
            throw new ResultCodeException(CoreResultCode.NOT_FOUND, ImmutableMap.of("entity", code));
        }
        return script;
    }

    @Override
    public List<String> getPropertyNames() {
        List<String> parameters = super.getPropertyNames();
        parameters.add(PARAMETER_SCRIPT_CODE);
        return parameters;
    }
}
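
ExecuteScriptTaskExecutor pairs @PersistJobDataAfterExecution with @DisallowConcurrentExecution. The Quartz documentation recommends exactly this pairing: without the concurrency guard, two overlapping executions could write back stale copies of the JobDataMap and lose updates. A canonical, self-contained illustration (not from the original sources; the "executions" key is assumed to be seeded with 0 when the job is built, e.g. via usingJobData("executions", 0)):

import org.quartz.*;

@PersistJobDataAfterExecution   // changes to the JobDetail's JobDataMap are re-stored
@DisallowConcurrentExecution    // prevents lost updates from concurrent executions
public class CountingJob implements Job {

    @Override
    public void execute(JobExecutionContext context) throws JobExecutionException {
        JobDataMap map = context.getJobDetail().getJobDataMap();
        int executions = map.getInt("executions");   // assumes the key was seeded at build time
        map.put("executions", executions + 1);       // persisted after execute() returns
    }
}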

Example: RepositoryManualScreenshotRunJob.java (Apache License 2.0, Author: box)
/**
 * @author jeanaurambault
 */
@Profile("!disablescheduling")
@Configuration
@Component
@DisallowConcurrentExecution
public class RepositoryManualScreenshotRunJob implements Job {

    static Logger logger = LoggerFactory.getLogger(RepositoryManualScreenshotRunJob.class);

    @Autowired
    TaskScheduler taskScheduler;

    @Autowired
    RepositoryRepository repositoryRepository;

    @Autowired
    RepositoryService repositoryService;

    @Override
    public void execute(JobExecutionContext context) throws JobExecutionException {
        logger.info("One time update of manual screenshot run on repository objects");
        List<Repository> repositories = repositoryRepository.findAll();
        for (Repository repository : repositories) {
            if (repository.getManualScreenshotRun() == null) {
                repositoryService.addManualScreenshotRun(repository);
            }
        }
    }

    @Bean(name = "repositoryManualScreenshotRun")
    public JobDetailFactoryBean jobDetailRepositoryManualScreenshotRunJob() {
        JobDetailFactoryBean jobDetailFactory = new JobDetailFactoryBean();
        jobDetailFactory.setJobClass(RepositoryManualScreenshotRunJob.class);
        jobDetailFactory.setDescription("One time update of manual screenshot run on repository objects");
        jobDetailFactory.setDurability(true);
        return jobDetailFactory;
    }

    @Bean
    public SimpleTriggerFactoryBean triggerRepositoryManualScreenshotRunJob(@Qualifier("repositoryManualScreenshotRun") JobDetail job) {
        SimpleTriggerFactoryBean simpleTriggerFactoryBean = new SimpleTriggerFactoryBean();
        simpleTriggerFactoryBean.setJobDetail(job);
        simpleTriggerFactoryBean.setRepeatCount(0);
        return simpleTriggerFactoryBean;
    }
}
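
Unlike the 30-second repeating trigger in PollableTaskCleanupJob above, this trigger sets repeatCount(0), so Quartz fires the job exactly once; the durable JobDetail keeps it registered afterwards. In plain Quartz terms, the equivalent one-shot trigger would look like this (a sketch, as a fragment):

// One-shot trigger: fire once, immediately, with no repeats.
Trigger oneShot = TriggerBuilder.newTrigger()
        .startNow()
        .withSchedule(SimpleScheduleBuilder.simpleSchedule().withRepeatCount(0))
        .build();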

Example: AbstractInterruptableJob.java (Apache License 2.0, Author: apache)
@DisallowConcurrentExecution
public abstract class AbstractInterruptableJob implements InterruptableJob {

    private final JobDelegate embeddedDelegate = new JobDelegate() {

        @Override
        public String currentStatus() {
            return "RUNNING";
        }

        @Override
        public void interrupt() {
        }

        @Override
        public boolean isInterrupted() {
            return false;
        }
    };

    public JobDelegate getDelegate() {
        return embeddedDelegate;
    }

    @Override
    public void interrupt() throws UnableToInterruptJobException {
        getDelegate().interrupt();
    }
}
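
InterruptableJob adds an interrupt() hook that the scheduler invokes when asked to interrupt a running job; the embedded delegate above is a no-op placeholder that concrete subclasses replace. From the outside, an interruption request looks like this (a sketch with a hypothetical job key, assuming scheduler is an existing org.quartz.Scheduler):

// Ask Quartz to interrupt the currently executing instances of the job.
// Quartz dispatches the call to the job's InterruptableJob.interrupt() method.
scheduler.interrupt(JobKey.jobKey("someInterruptableJob", "examples"));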

Example: RepositoryStatisticsJob.java (Apache License 2.0, Author: box)
/**
 * Update {@link RepositoryStatistic}s.
 *
 * @author jaurambault
 */
@Component
@DisallowConcurrentExecution
public class RepositoryStatisticsJob extends SchedulableJob {

    /**
     * logger
     */
    static Logger logger = LoggerFactory.getLogger(RepositoryStatisticsJob.class);

    static final String REPOSITORY_ID = "repositoryId";

    @Autowired
    RepositoryStatisticService repositoryStatisticService;

    @Override
    public void execute(JobExecutionContext context) throws JobExecutionException {
        Long repositoryId = context.getMergedJobDataMap().getLong(REPOSITORY_ID);
        logger.info("Execute for repositoryId: {}", repositoryId);
        repositoryStatisticService.updateStatistics(repositoryId);
    }

    @Override
    protected String getDescription() {
        return "Update statistics of a repository";
    }

    public void schedule(Long repositoryId) {
        JobDataMap jobDataMap = new JobDataMap();
        jobDataMap.put(REPOSITORY_ID, repositoryId.toString());
        schedule(jobDataMap, REPOSITORY_ID);
    }
}

Example: IdentityRoleExpirationTaskExecutor.java (MIT License, Author: bcvsolutions)
/**
 * Long running task for expired identity roles removal.
 * Expected usage is in cooperation with CronTaskTrigger, running
 * once a day after midnight.
 *
 * TODO: stateful + continue on exception
 * FIXME: create role request!
 *
 * @author Jan Helbich
 * @author Radek Tomiška
 */
@Service
@DisallowConcurrentExecution
@Description("Removes expired roles from identities.")
public class IdentityRoleExpirationTaskExecutor extends AbstractSchedulableTaskExecutor<Boolean> {

    private static final Logger LOG = LoggerFactory.getLogger(IdentityRoleExpirationTaskExecutor.class);

    // 
    @Autowired
    private IdmIdentityRoleService service;

    // 
    private LocalDate expiration;

    @Override
    public void init(Map<String, Object> properties) {
        super.init(properties);
        // 
        expiration = LocalDate.now();
        LOG.info("Expired roles removal task was initialized for expiration less than [{}].", expiration);
    }

    @Override
    public Boolean process() {
        this.counter = 0L;
        // 
        int pageSize = 100;
        boolean hasNextPage = false;
        do {
            // 0 => from start - roles from previous search are already removed
            Page<IdmIdentityRoleDto> assignedRoles = service.findExpiredRoles(expiration, PageRequest.of(0, pageSize));
            hasNextPage = assignedRoles.hasContent();
            if (count == null) {
                count = assignedRoles.getTotalElements();
            }
            for (Iterator<IdmIdentityRoleDto> i = assignedRoles.iterator(); i.hasNext() && hasNextPage; ) {
                IdmIdentityRoleDto assignedRole = i.next();
                if (assignedRole.getDirectRole() == null) {
                    // a sub role will be removed by its direct role
                    LOG.debug("Remove role: [{}] from contract id: [{}].", assignedRole.getRole(), assignedRole.getIdentityContract());
                    service.delete(assignedRole);
                }
                ++counter;
                hasNextPage &= updateState();
            }
        } while (hasNextPage);
        LOG.info("Expired roles removal task ended. Removed roles: [{}].", counter);
        return Boolean.TRUE;
    }
}

Example: EmitJob.java (Eclipse Public License 1.0, Author: eclipse)
/**
 * The class EmitJob is responsible for emitting {@link org.eclipse.kura.wire.WireRecord} every specified
 * interval (or specified CRON job interval)
 */
@DisallowConcurrentExecution
public final class EmitJob implements Job {

    /**
     * Emits a {@link org.eclipse.kura.wire.WireRecord} every specified interval.
     *
     * @param context
     *            the Job Execution context
     * @throws JobExecutionException
     *             the job execution exception
     */
    @Override
    public void execute(final JobExecutionContext context) throws JobExecutionException {
        final TimerJobDataMap dataMap = (TimerJobDataMap) context.getJobDetail().getJobDataMap();
        Timer.emit(dataMap.getWireSupport());
    }
}
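
The javadoc mentions both a fixed interval and a cron interval. A hedged sketch of the cron variant follows; the job key, group, and expression are illustrative, not taken from the Kura sources, and scheduler is assumed to be an existing org.quartz.Scheduler. Note that in Kura itself the job's data map must be a TimerJobDataMap carrying the WireSupport, as execute() above shows.

// Hypothetical: fire EmitJob every 10 seconds using a cron expression.
JobDetail job = JobBuilder.newJob(EmitJob.class)
        .withIdentity("emitJob", "wires")
        .build();
Trigger trigger = TriggerBuilder.newTrigger()
        .withIdentity("emitEveryTenSeconds", "wires")
        .withSchedule(CronScheduleBuilder.cronSchedule("0/10 * * * * ?"))
        .build();
scheduler.scheduleJob(job, trigger);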

Example: SparkSubmitJob.java (Apache License 2.0, Author: apache)
/**
 * Simple implementation of the Quartz Job interface, submitting the
 * griffin job to spark cluster via livy
 *
 * @see LivyTaskSubmitHelper#postToLivy(String)
 * @see Job#execute(JobExecutionContext)
 */
@PersistJobDataAfterExecution
@DisallowConcurrentExecution
@Component
public class SparkSubmitJob implements Job {

    private static final Logger LOGGER = LoggerFactory.getLogger(SparkSubmitJob.class);

    @Autowired
    private JobInstanceRepo jobInstanceRepo;

    @Autowired
    private BatchJobOperatorImpl batchJobOp;

    @Autowired
    private Environment env;

    @Autowired
    private LivyTaskSubmitHelper livyTaskSubmitHelper;

    @Value("${livy.need.queue:false}")
    private boolean isNeedLivyQueue;

    @Value("${livy.task.appId.retry.count:3}")
    private int appIdRetryCount;

    private GriffinMeasure measure;

    private String livyUri;

    private List<SegmentPredicate> mPredicates;

    private JobInstanceBean jobInstance;

    @Override
    public void execute(JobExecutionContext context) {
        JobDetail jd = context.getJobDetail();
        try {
            if (isNeedLivyQueue) {
                // livy batch limit
                livyTaskSubmitHelper.addTaskToWaitingQueue(jd);
            } else {
                saveJobInstance(jd);
            }
        } catch (Exception e) {
            LOGGER.error("Post spark task ERROR.", e);
        }
    }

    private void updateJobInstanceState(JobExecutionContext context) throws IOException {
        SimpleTrigger simpleTrigger = (SimpleTrigger) context.getTrigger();
        int repeatCount = simpleTrigger.getRepeatCount();
        int fireCount = simpleTrigger.getTimesTriggered();
        if (fireCount > repeatCount) {
            saveJobInstance(null, NOT_FOUND);
        }
    }

    private String post2Livy() {
        return livyTaskSubmitHelper.postToLivy(livyUri);
    }

    private boolean success(List<SegmentPredicate> predicates) {
        if (CollectionUtils.isEmpty(predicates)) {
            return true;
        }
        for (SegmentPredicate segPredicate : predicates) {
            Predicator predicator = PredicatorFactory.newPredicateInstance(segPredicate);
            try {
                if (predicator != null && !predicator.predicate()) {
                    return false;
                }
            } catch (Exception e) {
                return false;
            }
        }
        return true;
    }

    private void initParam(JobDetail jd) throws IOException {
        mPredicates = new ArrayList<>();
        jobInstance = jobInstanceRepo.findByPredicateName(jd.getJobDataMap().getString(PREDICATE_JOB_NAME));
        measure = toEntity(jd.getJobDataMap().getString(MEASURE_KEY), GriffinMeasure.class);
        livyUri = env.getProperty("livy.uri");
        setPredicates(jd.getJobDataMap().getString(PREDICATES_KEY));
        // in order to keep metric name unique, we set job name
        // as measure name at present
        measure.setName(jd.getJobDataMap().getString(JOB_NAME));
    }

    @SuppressWarnings({ "unchecked", "rawtypes" })
    private void setPredicates(String json) throws IOException {
        if (StringUtils.isEmpty(json)) {
            return;
        }
        List<SegmentPredicate> predicates = toEntity(json, new TypeReference<List<SegmentPredicate>>() {
        });
        if (predicates != null) {
            mPredicates.addAll(predicates);
        }
    }

    private String escapeCharacter(String str, String regex) {
        if (StringUtils.isEmpty(str)) {
            return str;
        }
        String escapeCh = "\\" + regex;
        return str.replaceAll(regex, escapeCh);
    }

    private String genEnv() {
        ProcessType type = measure.getProcessType();
        String env = type == BATCH ? ENV_BATCH : ENV_STREAMING;
        return env.replaceAll("\\$\\{JOB_NAME}", measure.getName());
    }

    private void setLivyConf() throws IOException {
        setLivyArgs();
    }

    private void setLivyArgs() throws IOException {
        List<String> args = new ArrayList<>();
        args.add(genEnv());
        String measureJson = JsonUtil.toJsonWithFormat(measure);
        // to fix a livy bug: backtick characters would otherwise be ignored by livy
        String finalMeasureJson = escapeCharacter(measureJson, "\\`");
        LOGGER.info(finalMeasureJson);
        args.add(finalMeasureJson);
        args.add("raw,raw");
        livyConfMap.put("args", args);
    }

    protected void saveJobInstance(JobDetail jd) throws SchedulerException, IOException {
        // If the result is null, the Livy URI or a Livy
        // parameter may be wrong.
        initParam(jd);
        setLivyConf();
        if (!success(mPredicates)) {
            updateJobInstanceState((JobExecutionContext) jd);
            return;
        }
        Map<String, Object> resultMap = post2LivyWithRetry();
        String group = jd.getKey().getGroup();
        String name = jd.getKey().getName();
        batchJobOp.deleteJob(group, name);
        LOGGER.info("Delete predicate job({},{}) SUCCESS.", group, name);
        setJobInstance(resultMap, FOUND);
        jobInstanceRepo.save(jobInstance);
    }

    private Map<String, Object> post2LivyWithRetry() throws IOException {
        String result = post2Livy();
        Map<String, Object> resultMap = null;
        if (result != null) {
            resultMap = livyTaskSubmitHelper.retryLivyGetAppId(result, appIdRetryCount);
            if (resultMap != null) {
                livyTaskSubmitHelper.increaseCurTaskNum(Long.valueOf(String.valueOf(resultMap.get("id"))).longValue());
            }
        }
        return resultMap;
    }

    protected void saveJobInstance(String result, State state) throws IOException {
        TypeReference<HashMap<String, Object>> type = new TypeReference<HashMap<String, Object>>() {
        };
        Map<String, Object> resultMap = null;
        if (result != null) {
            resultMap = toEntity(result, type);
        }
        setJobInstance(resultMap, state);
        jobInstanceRepo.save(jobInstance);
    }

    private void setJobInstance(Map<String, Object> resultMap, State state) {
        jobInstance.setState(state);
        jobInstance.setPredicateDeleted(true);
        if (resultMap != null) {
            Object status = resultMap.get("state");
            Object id = resultMap.get("id");
            Object appId = resultMap.get("appId");
            jobInstance.setState(status == null ? null : State.valueOf(status.toString().toUpperCase()));
            jobInstance.setSessionId(id == null ? null : Long.parseLong(id.toString()));
            jobInstance.setAppId(appId == null ? null : appId.toString());
        }
    }
}

Example: ExpireRestoredBusinessObjectDataJob.java (Apache License 2.0, Author: FINRAOS)
/**
 * The system job that expires restored business object data.
 */
@Component(ExpireRestoredBusinessObjectDataJob.JOB_NAME)
@DisallowConcurrentExecution
public class ExpireRestoredBusinessObjectDataJob extends AbstractSystemJob {

    private static final Logger LOGGER = LoggerFactory.getLogger(ExpireRestoredBusinessObjectDataJob.class);

    public static final String JOB_NAME = "expireRestoredBusinessObjectData";

    @Autowired
    private ExpireRestoredBusinessObjectDataService expireRestoredBusinessObjectDataService;

    @Autowired
    private BusinessObjectDataHelper businessObjectDataHelper;

    @Autowired
    private JsonHelper jsonHelper;

    @Autowired
    private ParameterHelper parameterHelper;

    @Override
    protected void executeInternal(JobExecutionContext context) throws JobExecutionException {
        // Log that the system job is started.
        LOGGER.info("Started system job. systemJobName=\"{}\"", JOB_NAME);
        // Get the parameter values.
        int maxBusinessObjectDataInstancesToProcess = parameterHelper.getParameterValueAsInteger(parameters, ConfigurationValue.EXPIRE_RESTORED_BDATA_JOB_MAX_BDATA_INSTANCES);
        // Log the parameter values.
        LOGGER.info("systemJobName=\"{}\" {}={}", JOB_NAME, ConfigurationValue.EXPIRE_RESTORED_BDATA_JOB_MAX_BDATA_INSTANCES, maxBusinessObjectDataInstancesToProcess);
        // Continue the processing only if the maximum number of business object data instances
        // that is allowed to be processed in a single run of this system job is greater than zero.
        int processedBusinessObjectDataInstances = 0;
        if (maxBusinessObjectDataInstancesToProcess > 0) {
            // Select restored business object data that is already expired.
            List<BusinessObjectDataStorageUnitKey> storageUnitKeys = expireRestoredBusinessObjectDataService.getS3StorageUnitsToExpire(maxBusinessObjectDataInstancesToProcess);
            // Log the number of storage units selected for processing.
            LOGGER.info("Selected for processing S3 storage units. systemJobName=\"{}\" storageUnitCount={}", JOB_NAME, storageUnitKeys.size());
            // Try to expire each of the selected storage units.
            for (BusinessObjectDataStorageUnitKey storageUnitKey : storageUnitKeys) {
                try {
                    expireRestoredBusinessObjectDataService.expireS3StorageUnit(storageUnitKey);
                    processedBusinessObjectDataInstances += 1;
                } catch (RuntimeException runtimeException) {
                    // Log the exception.
                    LOGGER.error("Failed to expire a restored business object data. systemJobName=\"{}\" storageName=\"{}\" businessObjectDataKey={}", JOB_NAME, storageUnitKey.getStorageName(), jsonHelper.objectToJson(businessObjectDataHelper.createBusinessObjectDataKeyFromStorageUnitKey(storageUnitKey)), runtimeException);
                }
            }
        }
        // Log the number of finalized restores.
        LOGGER.info("Expired restored business object data instances. systemJobName=\"{}\" businessObjectDataCount={}", JOB_NAME, processedBusinessObjectDataInstances);
        // Log that the system job is ended.
        LOGGER.info("Completed system job. systemJobName=\"{}\"", JOB_NAME);
    }

    @Override
    public void validateParameters(List<Parameter> parameters) {
        // This system job accepts only one optional parameter with an integer value.
        if (!CollectionUtils.isEmpty(parameters)) {
            Assert.isTrue(parameters.size() == 1, String.format("Too many parameters are specified for \"%s\" system job.", JOB_NAME));
            Assert.isTrue(parameters.get(0).getName().equalsIgnoreCase(ConfigurationValue.EXPIRE_RESTORED_BDATA_JOB_MAX_BDATA_INSTANCES.getKey()), String.format("Parameter \"%s\" is not supported by \"%s\" system job.", parameters.get(0).getName(), JOB_NAME));
            parameterHelper.getParameterValueAsInteger(parameters.get(0));
        }
    }

    @Override
    public JobDataMap getJobDataMap() {
        return getJobDataMap(ConfigurationValue.EXPIRE_RESTORED_BDATA_JOB_MAX_BDATA_INSTANCES);
    }

    @Override
    public String getCronExpression() {
        return configurationHelper.getProperty(ConfigurationValue.EXPIRE_RESTORED_BDATA_JOB_CRON_EXPRESSION);
    }
}

Example: SelectCurrentContractSliceTaskExecutor.java (MIT License, Author: bcvsolutions)
/**
 * Recalculates the slices currently used as contracts. Finds all slices that should be
 * used as the contract for the current date and copies their values to the parent contracts.
 *
 * @author svandav
 */
@Service
@DisallowConcurrentExecution
@Description("Recalculate the slices currently used as contracts. Find all slices which should be used as the contract for the current date and copy their values to the parent contracts.")
public class SelectCurrentContractSliceTaskExecutor extends AbstractSchedulableTaskExecutor<OperationResult> {

    private static final Logger LOG = LoggerFactory.getLogger(SelectCurrentContractSliceTaskExecutor.class);

    @Autowired
    private ContractSliceManager contractSliceManager;

    @Override
    @Transactional
    public OperationResult process() {
        // Find all invalid slices
        List<IdmContractSliceDto> unvalidSlices = contractSliceManager.findUnvalidSlices(null).getContent();
        boolean canContinue = true;
        // 
        this.counter = 0L;
        this.count = Long.valueOf(unvalidSlices.size());
        StringBuilder duplicitiesMessage = null;
        List<UUID> invalidContracts = new ArrayList<>();
        for (IdmContractSliceDto slice : unvalidSlices) {
            // Start recalculation
            List<IdmContractSliceDto> duplicatedSlices = // 
            unvalidSlices.stream().filter(// 
            s -> s.getParentContract() != null && s.getParentContract().equals(slice.getParentContract())).collect(Collectors.toList());
            if (duplicatedSlices.size() > 1) {
                String errorMsg = MessageFormat.format("We found more than one slice [{1}] which should be used as contract. This is not allowed. None of these slices will be used as contract. It means contract [{0}] is in an incorrect state now!", slice.getParentContract(), duplicatedSlices.size());
                LOG.warn(errorMsg);
                if (duplicitiesMessage == null) {
                    duplicitiesMessage = new StringBuilder();
                }
                if (!invalidContracts.contains(slice.getParentContract())) {
                    duplicitiesMessage.append(slice.getParentContract()).append(',');
                    invalidContracts.add(slice.getParentContract());
                }
            } else {
                contractSliceManager.setSliceAsCurrentlyUsing(slice, null);
                // 
                counter++;
            }
            canContinue = updateState();
            if (!canContinue) {
                break;
            }
        }
        if (duplicitiesMessage != null) {
            return new OperationResult.Builder(OperationState.EXCEPTION).setException(new ResultCodeException(CoreResultCode.CONTRACT_SLICE_DUPLICATE_CANDIDATES, ImmutableMap.of("contracts", duplicitiesMessage.toString()))).build();
        }
        return new OperationResult.Builder(OperationState.EXECUTED).build();
    }

    @Override
    protected OperationResult end(OperationResult result, Exception ex) {
        if (result != null && result.getException() != null) {
            return super.end(result, (Exception) result.getException());
        }
        return super.end(result, ex);
    }
}
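
One remark on the duplicate detection above: the loop filters the full slice list once per element, which is quadratic in the number of invalid slices. Below is a self-contained sketch of the same check done in a single pass with Collectors.groupingBy; plain UUIDs stand in for IdmContractSliceDto, so this illustrates the idea rather than the project's code.

import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.function.Function;
import java.util.stream.Collectors;

public class DuplicateSliceSketch {
    public static void main(String[] args) {
        UUID contractA = UUID.randomUUID();
        // two slices point at the same parent contract -> duplicate candidates
        List<UUID> parentContracts = List.of(contractA, contractA, UUID.randomUUID());
        Map<UUID, Long> countsByContract = parentContracts.stream()
                .collect(Collectors.groupingBy(Function.identity(), Collectors.counting()));
        countsByContract.forEach((contract, count) -> {
            if (count > 1) {
                System.out.println("duplicate candidate slices for contract " + contract);
            }
        });
    }
}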

19 View Complete Implementation : TestTaskExecutor.java
Copyright MIT License
Author : bcvsolutions
/**
 * Test task executor implementation
 *
 * @author Radek Tomiška
 */
@Component(TestTaskExecutor.TASK_NAME)
@DisallowConcurrentExecution
@Description("Test long running task")
@ConditionalOnProperty(prefix = "idm.pub.app", name = "stage", havingValue = "development")
public class TestTaskExecutor extends AbstractSchedulableStatefulExecutor<IdmIdentityDto> {

    public static final String TASK_NAME = "core-test-long-running-task";

    private static final org.slf4j.Logger LOG = org.slf4j.LoggerFactory.getLogger(TestTaskExecutor.class);

    private static final String PARAMETER_COUNT = "count";

    private static final long DEFAULT_COUNT = 100L;

    private String description;

    @Override
    public String getName() {
        return TASK_NAME;
    }

    @Override
    public void init(Map<String, Object> properties) {
        super.init(properties);
        // 
        count = getParameterConverter().toLong(properties, PARAMETER_COUNT);
        if (count == null) {
            count = DEFAULT_COUNT;
        }
        counter = 0L;
    }

    @Override
    public Page<IdmIdentityDto> getItemsToProcess(Pageable pageable) {
        List<IdmIdentityDto> identities = new ArrayList<>();
        for (int i = 0; i < count; i++) {
            identities.add(new IdmIdentityDto(UUID.randomUUID(), "test-" + i));
        }
        return new PageImpl<>(identities);
    }

    @Override
    public Optional<OperationResult> processItem(IdmIdentityDto dto) {
        try {
            LOG.warn(".......... idenreplacedy: [{}]", dto.getUsername());
            Thread.sleep(300L);
            return Optional.of(new OperationResult.Builder(OperationState.EXECUTED).build());
        } catch (Exception ex) {
            throw new CoreException(ex);
        }
    }

    @Override
    public List<String> getPropertyNames() {
        List<String> parameters = super.getPropertyNames();
        parameters.add(PARAMETER_COUNT);
        return parameters;
    }

    @Override
    public Map<String, Object> getProperties() {
        Map<String, Object> properties = super.getProperties();
        properties.put(PARAMETER_COUNT, count);
        return properties;
    }

    @Override
    public List<IdmFormAttributeDto> getFormAttributes() {
        IdmFormAttributeDto countAttribute = new IdmFormAttributeDto(PARAMETER_COUNT, PARAMETER_COUNT, PersistentType.INT);
        countAttribute.setDefaultValue(String.valueOf(DEFAULT_COUNT));
        countAttribute.setRequired(true);
        // 
        return Lists.newArrayList(countAttribute);
    }

    @Override
    public boolean supportsDryRun() {
        return true;
    }

    @Override
    public String getDescription() {
        if (description != null) {
            return description;
        }
        return super.getDescription();
    }

    public void setDescription(String description) {
        this.description = description;
    }
}
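
The init method above reads an optional count parameter and falls back to a default. A tiny self-contained sketch of that parameter-with-default pattern, with hypothetical names mirroring PARAMETER_COUNT and DEFAULT_COUNT:

import java.util.HashMap;
import java.util.Map;

public class ParameterDefaultSketch {
    private static final String PARAMETER_COUNT = "count";
    private static final long DEFAULT_COUNT = 100L;

    public static void main(String[] args) {
        Map<String, Object> properties = new HashMap<>(); // no "count" supplied
        Long count = (Long) properties.get(PARAMETER_COUNT);
        if (count == null) {
            count = DEFAULT_COUNT; // same fallback as init() above
        }
        System.out.println("count = " + count); // prints 100
    }
}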

19 View Complete Implementation : CursorJob.java
Copyright Apache License 2.0
Author : apache
@DisallowConcurrentExecution
public class CursorJob implements Job {

    public static final String CAPTURE_KEY = "capture";

    @Override
    public void execute(JobExecutionContext context) throws JobExecutionException {
        JobDataMap data = context.getJobDetail().getJobDataMap();
        CaptureScreen capture = (CaptureScreen) data.get(CAPTURE_KEY);
        if (!capture.getSendFrameGuard()) {
            capture.sendCursorStatus();
        }
    }
}
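
CursorJob pulls its CaptureScreen collaborator out of the JobDataMap under CAPTURE_KEY. Below is a hedged, self-contained sketch of that wiring pattern; the job and key are made up for illustration, and note that non-serializable collaborators such as a capture object require Quartz's in-memory job store.

import org.quartz.Job;
import org.quartz.JobBuilder;
import org.quartz.JobDetail;
import org.quartz.JobExecutionContext;
import org.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.quartz.SimpleScheduleBuilder;
import org.quartz.Trigger;
import org.quartz.TriggerBuilder;
import org.quartz.impl.StdSchedulerFactory;

public class DataMapWiringSketch {

    public static class PrintJob implements Job {
        static final String GREETING_KEY = "greeting";
        @Override
        public void execute(JobExecutionContext context) {
            // fetch the collaborator back out of the data map, as CursorJob does
            Object greeting = context.getJobDetail().getJobDataMap().get(GREETING_KEY);
            System.out.println(greeting);
        }
    }

    public static void main(String[] args) throws SchedulerException {
        Scheduler scheduler = StdSchedulerFactory.getDefaultScheduler();
        JobDetail job = JobBuilder.newJob(PrintJob.class).withIdentity("printJob").build();
        job.getJobDataMap().put(PrintJob.GREETING_KEY, "hello from the data map");
        Trigger trigger = TriggerBuilder.newTrigger()
                .withSchedule(SimpleScheduleBuilder.simpleSchedule()
                        .withIntervalInMilliseconds(50).repeatForever())
                .build();
        scheduler.scheduleJob(job, trigger);
        scheduler.start();
    }
}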

19 View Complete Implementation : SendMailHelperJobImpl.java
Copyright Apache License 2.0
Author : billchen198318
@DisallowConcurrentExecution
public class SendMailHelperJobImpl extends BaseJob implements Job {

    protected static Logger log = Logger.getLogger(SendMailHelperJobImpl.class);

    public SendMailHelperJobImpl() {
        super();
    }

    @Override
    protected void executeInternal(JobExecutionContext context) throws JobExecutionException {
        if (ContextLoader.getCurrentWebApplicationContext() == null) {
            log.warn("ApplicationContext no completed, AppContext.getApplicationContext() == null");
            return;
        }
        // log.info("begin....");
        if (this.checkCurrentlyExecutingJobs(context, this)) {
            log.warn("Same schedule job, current working...");
            return;
        }
        try {
            this.loginForBackgroundProgram();
            // log.info("Background Program userId: " + this.getAccountId());
            @SuppressWarnings("unchecked")
            ISysMailHelperService<SysMailHelperVO, TbSysMailHelper, String> sysMailHelperService = (ISysMailHelperService<SysMailHelperVO, TbSysMailHelper, String>) AppContext.getBean("core.service.SysMailHelperService");
            if (MailClientUtils.getEnable()) {
                String linkMailId = SimpleUtils.getStrYMD("").substring(0, 6);
                DefaultResult<List<TbSysMailHelper>> result = sysMailHelperService.findForJobList(linkMailId, YesNo.NO);
                if (result.getValue() != null) {
                    this.process(sysMailHelperService, result.getValue());
                }
            } else {
                log.warn("************ mail sender is disable. please modify config CNF/CNF_CONF002 ************");
            }
        } catch (ServiceException e) {
            e.printStackTrace();
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            try {
                this.logoutForBackgroundProgram();
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    // log.info("end....");
    }

    private void process(ISysMailHelperService<SysMailHelperVO, TbSysMailHelper, String> sysMailHelperService, List<TbSysMailHelper> mailHelperList) throws ServiceException, Exception {
        if (mailHelperList == null || mailHelperList.size() < 1) {
            return;
        }
        for (TbSysMailHelper mailHelper : mailHelperList) {
            new ProcessWorker(sysMailHelperService, mailHelper);
        }
    }

    private clreplaced ProcessWorker extends Thread {

        private ISysMailHelperService<SysMailHelperVO, TbSysMailHelper, String> sysMailHelperService = null;

        private TbSysMailHelper mailHelper = null;

        private Thread flag = this;

        // 3 seconds
        private long sleepTime = 3000;

        // retry up to 3 times
        private int retry = 3;

        private boolean success = false;

        public ProcessWorker(ISysMailHelperService<SysMailHelperVO, TbSysMailHelper, String> sysMailHelperService, TbSysMailHelper mailHelper) {
            this.sysMailHelperService = sysMailHelperService;
            this.mailHelper = mailHelper;
            this.flag = this;
            this.start();
        }

        public void run() {
            this.flag = this;
            while (!this.success && this.flag == Thread.currentThread() && retry > 0) {
                if (this.mailHelper == null) {
                    break;
                }
                try {
                    log.info("process mail-id: " + this.mailHelper.getMailId());
                    MailClientUtils.send(this.mailHelper.getMailFrom(), this.mailHelper.getMailTo(), this.mailHelper.getMailCc(), this.mailHelper.getMailBcc(), this.mailHelper.getSubject(), new String(this.mailHelper.getText(), "utf8"));
                    success = true;
                } catch (MailException e1) {
                    e1.printStackTrace();
                } catch (UnsupportedEncodingException e1) {
                    e1.printStackTrace();
                } catch (Exception e1) {
                    e1.printStackTrace();
                }
                if (success) {
                    try {
                        if (YesNo.YES.equals(this.mailHelper.getRetainFlag())) {
                            this.mailHelper.setSuccessFlag(YesNo.YES);
                            this.mailHelper.setSuccessTime(new Date());
                            this.sysMailHelperService.update(mailHelper);
                        } else {
                            this.sysMailHelperService.delete(mailHelper);
                        }
                    } catch (ServiceException e) {
                        e.printStackTrace();
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                    log.info("success mail-id: " + this.mailHelper.getMailId());
                    this.flag = null;
                }
                this.retry--;
                try {
                    Thread.sleep(this.sleepTime);
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
            }
            this.flag = null;
            MailClientUtils.clearThreadLocal();
        }
    }
}
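
executeInternal also guards with checkCurrentlyExecutingJobs on top of @DisallowConcurrentExecution. BaseJob's implementation is not shown in this listing and may differ; here is a hedged sketch of how such a guard can be written against the plain Quartz API (note that getCurrentlyExecutingJobs only sees jobs running in the local scheduler instance, not across a cluster):

import java.util.List;
import org.quartz.JobExecutionContext;
import org.quartz.SchedulerException;

public final class RunningJobGuard {

    private RunningJobGuard() {
    }

    // true when another fire of the same JobDetail is already in progress
    public static boolean isAlreadyRunning(JobExecutionContext context) throws SchedulerException {
        List<JobExecutionContext> running = context.getScheduler().getCurrentlyExecutingJobs();
        for (JobExecutionContext other : running) {
            boolean sameJob = other.getJobDetail().getKey().equals(context.getJobDetail().getKey());
            boolean differentFire = !other.getFireInstanceId().equals(context.getFireInstanceId());
            if (sameJob && differentFire) {
                return true;
            }
        }
        return false;
    }
}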

19 View Complete Implementation : ClearDirtyStateForContractSliceTaskExecutor.java
Copyright MIT License
Author : bcvsolutions
/**
 * Task that clears all dirty states for all contract slices.
 *
 * @author Ondrej Kopr
 * @author Vít Švanda
 */
@Service
@DisallowConcurrentExecution
@Description("Clear dirty state for contract slices. During synchronization is set dirty flag for all processed slices. This task remove the flag.")
public class ClearDirtyStateForContractSliceTaskExecutor extends AbstractSchedulableTaskExecutor<OperationResult> {

    public final static String ORIGINAL_SLICE = "originalSlice";

    public final static String CURRENT_SLICE = "currentSlice";

    public final static String TO_DELETE = "toDelete";

    private static final org.slf4j.Logger LOG = org.slf4j.LoggerFactory.getLogger(ClearDirtyStateForContractSliceTaskExecutor.class);

    @Autowired
    private EntityStateManager entityStateManager;

    @Autowired
    private IdmContractSliceService contractSliceService;

    @Autowired
    private ContractSliceManager contractSliceManager;

    @Autowired
    private EntityManager entityManager;

    @Override
    public OperationResult process() {
        boolean canContinue = true;
        List<IdmEntityStateDto> dirtyStates = findAllDirtyStatesForSlices(null).getContent();
        if (count == null) {
            count = Long.valueOf(dirtyStates.size());
        }
        counter = 0L;
        List<IdmEntityStateDto> updateDirtyStates = Lists.newArrayList();
        List<IdmEntityStateDto> validDirtyStates = Lists.newArrayList();
        List<IdmEntityStateDto> futureDirtyStates = Lists.newArrayList();
        List<IdmEntityStateDto> invalidDirtyStates = Lists.newArrayList();
        List<IdmEntityStateDto> deleteDirtyStates = Lists.newArrayList();
        dirtyStates.forEach(dirtyState -> {
            ResultModel resultModel = dirtyState.getResult().getModel();
            Map<String, Object> parameters = new HashMap<>();
            if (resultModel != null) {
                parameters = resultModel.getParameters();
            }
            boolean sliceIsToDelete = this.getBooleanProperty(ClearDirtyStateForContractSliceTaskExecutor.TO_DELETE, parameters);
            if (sliceIsToDelete) {
                deleteDirtyStates.add(dirtyState);
            } else {
                updateDirtyStates.add(dirtyState);
            }
        });
        updateDirtyStates.forEach(dirtyState -> {
            IdmContractSliceDto contractSliceDto = contractSliceService.get(dirtyState.getOwnerId());
            if (contractSliceDto == null) {
                DefaultResultModel model = new DefaultResultModel(CoreResultCode.NOT_FOUND, ImmutableMap.of("ownerId", dirtyState.getOwnerId()));
                this.logItemProcessed(dirtyState, new OperationResult.Builder(OperationState.NOT_EXECUTED).setModel(model).build());
                return;
            }
            // Temporarily put the current slice into the dirty state
            dirtyState.getEmbedded().put(CURRENT_SLICE, contractSliceDto);
            // Divide slices by validity
            IdmIdentityContractDto mockContract = new IdmIdentityContractDto();
            contractSliceManager.convertSliceToContract(contractSliceDto, mockContract);
            if (!mockContract.isValidNowOrInFuture()) {
                invalidDirtyStates.add(dirtyState);
            } else if (mockContract.isValid()) {
                validDirtyStates.add(dirtyState);
            } else {
                futureDirtyStates.add(dirtyState);
            }
        });
        // Process for new and updated slices - valid
        for (IdmEntityStateDto dirtyState : validDirtyStates) {
            canContinue = processState(canContinue, dirtyState);
            if (!canContinue) {
                break;
            }
        }
        // Process for new and updated slices - future valid
        for (IdmEntityStateDto dirtyState : futureDirtyStates) {
            canContinue = processState(canContinue, dirtyState);
            if (!canContinue) {
                break;
            }
        }
        // Process for new and updated slices - invalid
        for (IdmEntityStateDto dirtyState : invalidDirtyStates) {
            canContinue = processState(canContinue, dirtyState);
            if (!canContinue) {
                break;
            }
        }
        // Process for slices to delete
        for (IdmEntityStateDto dirtyState : deleteDirtyStates) {
            Assert.notNull(dirtyState, "State (dirty) is required.");
            Assert.notNull(dirtyState.getId(), "State identifier (dirty) is required.");
            processItemToDelete(dirtyState);
            counter++;
            // flush and clear session - if LRT is wrapped in parent transaction, we need to
            // clear it (same behavior as in stateful tasks)
            if (getHibernateSession().isOpen()) {
                getHibernateSession().flush();
                getHibernateSession().clear();
            }
            canContinue &= this.updateState();
            if (!canContinue) {
                break;
            }
        }
        return new OperationResult(OperationState.EXECUTED);
    }

    /**
     * Process state for new or updated slices
     *
     * @param canContinue
     * @param dirtyState
     * @return
     */
    private boolean processState(boolean canContinue, IdmEntityStateDto dirtyState) {
        Assert.notNull(dirtyState, "State (dirty) is required.");
        Assert.notNull(dirtyState.getId(), "State identifier (dirty) is required.");
        processItem(dirtyState);
        counter++;
        // flush and clear session - if LRT is wrapped in parent transaction, we need to
        // clear it (same behavior as in stateful tasks)
        if (getHibernateSession().isOpen()) {
            getHibernateSession().flush();
            getHibernateSession().clear();
        }
        canContinue &= this.updateState();
        return canContinue;
    }

    private Session getHibernateSession() {
        return (Session) this.entityManager.getDelegate();
    }

    /**
     * Process one dirty state for contract slice
     *
     * @param dirtyState
     */
    private void processItem(IdmEntityStateDto dirtyState) {
        try {
            if (dirtyState.getOwnerType() == null || !dirtyState.getOwnerType().equals(IdmContractSlice.class.getName())) {
                this.logItemProcessed(dirtyState, new OperationResult.Builder(OperationState.NOT_EXECUTED).build());
                return;
            }
            IdmContractSliceDto contractSliceDto = (IdmContractSliceDto) dirtyState.getEmbedded().get(CURRENT_SLICE);
            if (contractSliceDto == null) {
                contractSliceDto = contractSliceService.get(dirtyState.getOwnerId());
            }
            if (contractSliceDto == null) {
                DefaultResultModel model = new DefaultResultModel(CoreResultCode.NOT_FOUND, ImmutableMap.of("ownerId", dirtyState.getOwnerId()));
                this.logItemProcessed(dirtyState, new OperationResult.Builder(OperationState.NOT_EXECUTED).setModel(model).build());
                return;
            }
            ResultModel resultModel = dirtyState.getResult().getModel();
            Map<String, Object> parameters = new HashMap<>();
            if (resultModel != null) {
                parameters = resultModel.getParameters();
            }
            IdmContractSliceDto originalSlice = null;
            Object originalSliceAsObject = parameters.get(ORIGINAL_SLICE);
            if (originalSliceAsObject instanceof IdmContractSliceDto) {
                originalSlice = (IdmContractSliceDto) originalSliceAsObject;
            }
            // Transform saved parameters into map string and serializable value
            Map<String, Serializable> transformedParameters = transformParameters(parameters);
            // The currently-using flag was set to FALSE when the slice was marked dirty; force recalculation
            transformedParameters.put(IdmContractSliceService.FORCE_RECALCULATE_CURRENT_USING_SLICE, Boolean.TRUE);
            contractSliceManager.recalculateContractSlice(contractSliceDto, originalSlice, transformedParameters);
            this.logItemProcessed(contractSliceDto, new OperationResult.Builder(OperationState.EXECUTED).build());
            entityStateManager.deleteState(dirtyState);
        } catch (Exception e) {
            this.logItemProcessed(dirtyState, new OperationResult.Builder(OperationState.EXCEPTION).setCause(e).build());
        }
    }

    /**
     * Process one dirty state for contract slice to delete
     *
     * @param dirtyState
     */
    private void processItemToDelete(IdmEntityStateDto dirtyState) {
        try {
            if (dirtyState.getOwnerType() == null || !dirtyState.getOwnerType().equals(IdmContractSlice.class.getName())) {
                this.logItemProcessed(dirtyState, new OperationResult.Builder(OperationState.NOT_EXECUTED).build());
                return;
            }
            IdmContractSliceDto contractSliceDto = contractSliceService.get(dirtyState.getOwnerId());
            if (contractSliceDto == null) {
                DefaultResultModel model = new DefaultResultModel(CoreResultCode.NOT_FOUND, ImmutableMap.of("ownerId", dirtyState.getOwnerId()));
                this.logItemProcessed(dirtyState, new OperationResult.Builder(OperationState.NOT_EXECUTED).setModel(model).build());
                return;
            }
            ResultModel resultModel = dirtyState.getResult().getModel();
            Map<String, Object> parameters = new HashMap<>();
            if (resultModel != null) {
                parameters = resultModel.getParameters();
            }
            // Transform saved parameters into map string and serializable value
            Map<String, Serializable> transformedParameters = transformParameters(parameters);
            EntityEvent<IdmContractSliceDto> event = new ContractSliceEvent(ContractSliceEventType.DELETE, contractSliceDto, transformedParameters);
            // Delete slice (with recalculation)
            contractSliceService.publish(event);
            this.logItemProcessed(contractSliceDto, new OperationResult.Builder(OperationState.EXECUTED).build());
        } catch (Exception e) {
            this.logItemProcessed(dirtyState, new OperationResult.Builder(OperationState.EXCEPTION).setCause(e).build());
        }
    }

    /**
     * Find all dirty states for contract slices
     *
     * @param pageable
     * @return
     */
    private Page<IdmEntityStateDto> findAllDirtyStatesForSlices(Pageable pageable) {
        IdmEntityStateFilter filter = new IdmEntityStateFilter();
        filter.setResultCode(CoreResultCode.DIRTY_STATE.getCode());
        filter.setOwnerType(IdmContractSlice.class.getName());
        return entityStateManager.findStates(filter, pageable);
    }

    private boolean getBooleanProperty(String property, Map<String, Object> properties) {
        if (properties == null) {
            return false;
        }
        Object propertyValue = properties.get(property);
        if (propertyValue == null) {
            return false;
        }
        return (Boolean) propertyValue;
    }

    /**
     * Transformation of parameters. Removes the temporary parameters
     *
     * @param parameters
     * @return
     */
    private Map<String, Serializable> transformParameters(Map<String, Object> parameters) {
        Map<String, Serializable> transformedParameters = new HashMap<>();
        parameters.forEach((key, value) -> {
            if (key != null && ORIGINAL_SLICE.equals(key)) {
            // skip original slice
            } else if (key != null && IdmContractSliceService.SET_DIRTY_STATE_CONTRACT_SLICE.equals(key)) {
            // remove skip recalculation for contract slice
            } else if (key != null && ClearDirtyStateForContractSliceTaskExecutor.CURRENT_SLICE.equals(key)) {
            // remove current slice
            } else if (key != null && ClearDirtyStateForContractSliceTaskExecutor.TO_DELETE.equals(key)) {
            // remove to delete for contract slice
            } else if (value == null) {
                transformedParameters.put(key, null);
            } else if (value instanceof Serializable) {
                transformedParameters.put(key, (Serializable) value);
            } else {
                LOG.error("Given value [{}] with key [{}] for parameters is not posible cast to serializable. Skip the value", value, key);
            }
        });
        return transformedParameters;
    }
}

19 View Complete Implementation : WsDayETLJob.java
Copyright Apache License 2.0
Author : boubei-com
/**
 * Day-by-day extraction job; multiple instances can be configured to fire at different times.
 */
@DisallowConcurrentExecution
public class WsDayETLJob extends ByDayETLJob {

    protected String etlType() {
        return "wsDay";
    }

    /* ETL by day */
    protected String etlByDay(Task task, Date day, List<Date> repeatList, boolean isFirstDay) {
        Report report = new Report();
        report.setName(task.getName());
        report.setDatasource(task.getSourceDS());
        report.setScript(task.getSourceScript());
        report.setParam("[{'label':'fromDay', 'type':'date'}, {'label':'toDay', 'type':'date'}]");
        Map<String, String> paramsMap = new HashMap<String, String>();
        paramsMap.put("param1", DateUtil.format(day));
        paramsMap.put("param2", DateUtil.format(DateUtil.addDays(day, 1)));
        SQLExcutor ex = ReportQuery.excute(report, paramsMap, 1, 0);
        if (ex.count == 0) {
            return "total=" + 0;
        }
        StringBuffer data = new StringBuffer();
        data.append(EasyUtils.list2Str(ex.selectFields) + ",licenseowner").append("\n");
        for (Map<String, Object> row : ex.result) {
            Collection<Object> values = new ArrayList<Object>();
            for (String field : ex.selectFields) {
                Object value = row.get(field);
                values.add(DMUtil.preTreatVal(value));
            }
            values.add(InstallListener.licenseOwner());
            data.append(EasyUtils.list2Str(values)).append("\n");
        }
        MatrixUtil.remoteRecordBatch(task.getTargetScript(), data.toString());
        return "total=" + ex.count;
    }
}

19 View Complete Implementation : ByDayETLJob.java
Copyright Apache License 2.0
Author : boubei-com
/**
 * Day-by-day extraction job; multiple instances can be configured to fire at different times.
 */
@DisallowConcurrentExecution
public class ByDayETLJob extends AbstractETLJob {

    protected String etlType() {
        return "byDay";
    }

    protected List<String> getExistDays(Long taskId) {
        String hql = "select distinct dataDay from TaskLog where taskId = ? and exception='no'";
        List<String> existDays = new ArrayList<String>();
        List<?> list = commonService.getList(hql, taskId);
        for (Object obj : list) {
            existDays.add((String) obj);
        }
        return existDays;
    }

    /*
	 * Note: today's data is also aggregated, so repeatDays should be at least 1.
	 */
    public TaskLog excuteTask(Task task) {
        // fetch the daily-rollup dates that already have a successful run (existDays)
        List<String> existDays = getExistDays(task.getId());
        // defaults to midnight today
        Date currDay = DateUtil.noHMS(task.getStartDay());
        Set<Date> dateList = new LinkedHashSet<Date>();
        List<Date> repeatList = new ArrayList<Date>();
        // include today; today's data is aggregated as well
        Date today = DateUtil.addDays(DateUtil.today(), 1);
        while (currDay.before(today)) {
            if (!existDays.contains(DateUtil.format(currDay))) {
                // missing day
                dateList.add(currDay);
            }
            currDay = DateUtil.addDays(currDay, 1);
        }
        int repeats = EasyUtils.obj2Int(task.getRepeatDays());
        while (repeats > 0) {
            currDay = DateUtil.subDays(today, repeats);
            dateList.add(currDay);
            repeatList.add(currDay);
            repeats--;
        }
        log.info(task.getName() + " is starting! total days = " + dateList.size());
        long start = System.currentTimeMillis();
        int index = 0;
        TaskLog tLog = null;
        for (final Date day : dateList) {
            tLog = new TaskLog(task);
            // record the data day being processed
            tLog.setDataDay(DateUtil.format(day));
            try {
                long startTime = System.currentTimeMillis();
                String result = etlByDay(task, day, repeatList, index == 0);
                tLog.setException("no");
                tLog.setDetail(result);
                tLog.setRunningMS(System.currentTimeMillis() - startTime);
            } catch (Exception e) {
                setException(tLog, task, e);
                return tLog;
            } finally {
                // log the task run regardless of success or failure
                commonService.create(tLog);
                index++;
            }
        }
        log.info("Done! Cost time: " + (System.currentTimeMillis() - start));
        return tLog;
    }

    /* ETL by day */
    protected String etlByDay(Task task, Date day, List<Date> repeatList, boolean isFirstDay) {
        // decide whether this day should be re-extracted to refresh its data; if so, first delete the existing data for that day
        String preRepeatSQL = task.getPreRepeatSQL();
        if (!EasyUtils.isNullOrEmpty(preRepeatSQL)) {
            if (repeatList.contains(day)) {
                Map<Integer, Object> params = new HashMap<Integer, Object>();
                params.put(1, new Timestamp(day.getTime()));
                SQLExcutor.excute(preRepeatSQL, params, task.getTargetDS());
            } else if (repeatList.isEmpty()) {
                // eg: truncate table, repeat = 0
                SQLExcutor.excute(preRepeatSQL, task.getTargetDS());
            }
        }
        Report report;
        String source = task.getSourceScript();
        try {
            Long reportId = EasyUtils.obj2Long(source);
            report = reportService.getReport(reportId, false);
        } catch (Exception e) {
            report = new Report();
            report.setName(task.getName());
            report.setDatasource(task.getSourceDS());
            report.setScript(task.getSourceScript());
            report.setParam("[{'label':'fromDay', 'type':'date'}, {'label':'toDay', 'type':'date'}]");
        }
        Map<String, String> paramsMap = new HashMap<String, String>();
        paramsMap.put("param1", DateUtil.format(day));
        paramsMap.put("param2", DateUtil.format(DateUtil.addDays(day, 1)));
        SQLExcutor ex = ReportQuery.excute(report, paramsMap, 1, 1);
        int total = ex.count;
        int totalPages = PageInfo.calTotalPages(total, PAGE_SIZE);
        // query page by page, insert in batches
        String target = task.getTargetScript();
        for (int pageNum = 1; pageNum <= totalPages; pageNum++) {
            // before each iteration, check whether the task has been stopped manually
            checkTask(task.getId());
            ex = ReportQuery.excute(report, paramsMap, pageNum, PAGE_SIZE);
            List<Map<Integer, Object>> list1 = new ArrayList<Map<Integer, Object>>();
            List<Map<String, String>> list2 = new ArrayList<Map<String, String>>();
            for (Map<String, Object> row : ex.result) {
                Map<Integer, Object> item1 = new HashMap<Integer, Object>();
                Map<String, String> item2 = new HashMap<String, String>();
                for (int i = 0; i < ex.selectFields.size(); i++) {
                    String field = ex.selectFields.get(i);
                    Object value = row.get(field);
                    item1.put(i + 1, value);
                    item2.put(field, EasyUtils.obj2String(value));
                }
                list1.add(item1);
                list2.add(item2);
            }
            // check whether target is a record table name or a SQL statement: a single token is treated as a record table, otherwise it is executed as batch SQL
            if (target.trim().split(" ").length == 1) {
                Long recordId = recordService.getRecordID(target, false);
                _Database db = recordService.getDB(recordId);
                db.insertBatch(list2);
            } else {
                SQLExcutor.excuteBatch(target, list1, task.getTargetDS());
            }
        }
        return "total=" + total;
    }
}
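
excuteTask above builds its work list from two sources: every day since startDay with no successful TaskLog entry, plus the last repeatDays days, which are always re-extracted. A self-contained sketch of that day-selection logic using java.time stand-ins (an illustration of the idea, not the project's code):

import java.time.LocalDate;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

public class DaySelectionSketch {
    public static void main(String[] args) {
        LocalDate start = LocalDate.now().minusDays(10);
        List<LocalDate> alreadyDone = List.of(LocalDate.now().minusDays(9)); // days with a successful run
        int repeatDays = 2;

        Set<LocalDate> toProcess = new LinkedHashSet<>();
        for (LocalDate d = start; !d.isAfter(LocalDate.now()); d = d.plusDays(1)) {
            if (!alreadyDone.contains(d)) {
                toProcess.add(d); // missing days
            }
        }
        for (int i = repeatDays; i > 0; i--) {
            toProcess.add(LocalDate.now().minusDays(i)); // refresh window, always re-added
        }
        System.out.println(toProcess);
    }
}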

19 View Complete Implementation : DeleteNotificationTaskExecutor.java
Copyright MIT License
Author : bcvsolutions
/**
 * Delete notifications.
 *
 * @author Radek Tomiška
 * @since 9.7.12
 */
@Service(DeleteNotificationTaskExecutor.TASK_NAME)
@DisallowConcurrentExecution
@Description("Delete notifications.")
public class DeleteNotificationTaskExecutor extends AbstractSchedulableStatefulExecutor<IdmNotificationLogDto> {

    private static final org.slf4j.Logger LOG = org.slf4j.LoggerFactory.getLogger(DeleteNotificationTaskExecutor.class);

    public static final String TASK_NAME = "core-delete-notification-long-running-task";

    // events older than
    public static final String PARAMETER_NUMBER_OF_DAYS = "numberOfDays";

    // sent notification
    public static final String PARAMETER_SENT_ONLY = "sentOnly";

    // 
    // half year by default
    public static final int DEFAULT_NUMBER_OF_DAYS = 180;

    public static final boolean DEFAULT_SENT_ONLY = true;

    // 
    @Autowired
    private IdmNotificationLogService service;

    // 
    // optional
    private int numberOfDays = 0;

    // optional
    private boolean sentOnly;

    @Override
    public String getName() {
        return TASK_NAME;
    }

    @Override
    public void init(Map<String, Object> properties) {
        super.init(properties);
        // 
        Long givenNumberOfDays = getParameterConverter().toLong(properties, PARAMETER_NUMBER_OF_DAYS);
        if (givenNumberOfDays != null) {
            numberOfDays = Math.toIntExact(givenNumberOfDays);
        } else {
            numberOfDays = 0;
        }
        sentOnly = getParameterConverter().toBoolean(properties, PARAMETER_SENT_ONLY, DEFAULT_SENT_ONLY);
    }

    @Override
    protected boolean start() {
        LOG.warn("Start deleting notifications older than [{}] days [sentOnly: {}].", numberOfDays, sentOnly);
        // 
        return super.start();
    }

    @Override
    protected Boolean end(Boolean result, Exception ex) {
        result = super.end(result, ex);
        LOG.warn("End deleting notifications older than [{}] days [sent only: {}]. Processed notifications [{}].", numberOfDays, sentOnly, counter);
        return result;
    }

    @Override
    public Page<IdmNotificationLogDto> getItemsToProcess(Pageable pageable) {
        IdmNotificationFilter filter = new IdmNotificationFilter();
        if (sentOnly) {
            filter.setState(NotificationState.ALL);
            filter.setSent(Boolean.TRUE);
        }
        if (numberOfDays > 0) {
            filter.setTill(LocalDate.now().atStartOfDay(ZoneId.systemDefault()).minusDays(numberOfDays));
        }
        return service.find(filter, PageRequest.of(0, pageable.getPageSize(), new Sort(Direction.ASC, IdmNotificationLog_.parent.getName())));
    // a fresh page 0 is requested every time: records are deleted as we go, so the first page always holds the next batch
    }

    @Override
    public Optional<OperationResult> processItem(IdmNotificationLogDto dto) {
        if (service.get(dto) != null) {
            // a child notification may have been deleted already.
            service.delete(dto);
        }
        // 
        return Optional.of(new OperationResult.Builder(OperationState.EXECUTED).build());
    }

    @Override
    public List<String> getPropertyNames() {
        List<String> parameters = super.getPropertyNames();
        parameters.add(PARAMETER_NUMBER_OF_DAYS);
        parameters.add(PARAMETER_SENT_ONLY);
        // 
        return parameters;
    }

    @Override
    public Map<String, Object> getProperties() {
        Map<String, Object> properties = super.getProperties();
        properties.put(PARAMETER_NUMBER_OF_DAYS, numberOfDays);
        properties.put(PARAMETER_SENT_ONLY, sentOnly);
        // 
        return properties;
    }

    @Override
    public List<IdmFormAttributeDto> getFormAttributes() {
        IdmFormAttributeDto numberOfDaysAttribute = new IdmFormAttributeDto(PARAMETER_NUMBER_OF_DAYS, PARAMETER_NUMBER_OF_DAYS, PersistentType.LONG);
        numberOfDaysAttribute.setDefaultValue(String.valueOf(DEFAULT_NUMBER_OF_DAYS));
        // 
        IdmFormAttributeDto sentAttribute = new IdmFormAttributeDto(PARAMETER_SENT_ONLY, PARAMETER_SENT_ONLY, PersistentType.BOOLEAN);
        sentAttribute.setDefaultValue(String.valueOf(DEFAULT_SENT_ONLY));
        // 
        return Lists.newArrayList(numberOfDaysAttribute, sentAttribute);
    }

    @Override
    public boolean supportsQueue() {
        return false;
    }

    @Override
    public boolean supportsDryRun() {
        // TODO: get context (or LRT) in getItemsToProcess ...
        return false;
    }

    @Override
    public boolean requireNewTransaction() {
        return true;
    }
}
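
getItemsToProcess above always requests the first page because each processed item is deleted. A self-contained sketch of that delete-while-paging pattern on a plain list (illustration only):

import java.util.ArrayList;
import java.util.List;

public class DeleteWhilePagingSketch {
    public static void main(String[] args) {
        List<String> table = new ArrayList<>(List.of("a", "b", "c", "d", "e"));
        int pageSize = 2;
        while (!table.isEmpty()) {
            // always fetch page 0: remaining rows slide into the first page as earlier ones are deleted
            List<String> page = new ArrayList<>(table.subList(0, Math.min(pageSize, table.size())));
            for (String row : page) {
                System.out.println("delete " + row);
                table.remove(row);
            }
        }
    }
}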

19 View Complete Implementation : AssetExtractionCleanupJob.java
Copyright Apache License 2.0
Author : box
/**
 * @author aloison
 */
@Profile("!disablescheduling")
@Configuration
@Component
@DisallowConcurrentExecution
public class AssetExtractionCleanupJob implements Job {

    /**
     * logger
     */
    static Logger logger = LoggerFactory.getLogger(AssetExtractionCleanupJob.class);

    @Autowired
    AssetExtractionCleanupService assetExtractionCleanupService;

    @Override
    public void execute(JobExecutionContext context) throws JobExecutionException {
        logger.debug("Cleanup replacedet extraction");
        replacedetExtractionCleanupService.cleanupOldreplacedetExtractions();
    }

    @Bean(name = "jobDetailreplacedetExtractionCleanup")
    public JobDetailFactoryBean jobDetailreplacedetExtractionCleanup() {
        JobDetailFactoryBean jobDetailFactory = new JobDetailFactoryBean();
        jobDetailFactory.setJobClreplaced(replacedetExtractionCleanupJob.clreplaced);
        jobDetailFactory.setDescription("Cleanup old replacedet extraction");
        jobDetailFactory.setDurability(true);
        return jobDetailFactory;
    }

    @Bean
    public SimpleTriggerFactoryBean triggerAssetExtractionCleanup(@Qualifier("jobDetailAssetExtractionCleanup") JobDetail job) {
        SimpleTriggerFactoryBean trigger = new SimpleTriggerFactoryBean();
        trigger.setJobDetail(job);
        trigger.setRepeatInterval(300000);
        trigger.setRepeatCount(SimpleTrigger.REPEAT_INDEFINITELY);
        return trigger;
    }
}
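
For readers more familiar with plain Quartz than with Spring's factory beans: here is a hedged sketch of roughly equivalent wiring, with a placeholder job standing in for the cleanup service call. This is an approximation for comparison, not code from the project above.

import org.quartz.Job;
import org.quartz.JobBuilder;
import org.quartz.JobDetail;
import org.quartz.JobExecutionContext;
import org.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.quartz.SimpleScheduleBuilder;
import org.quartz.Trigger;
import org.quartz.TriggerBuilder;
import org.quartz.impl.StdSchedulerFactory;

public class PlainQuartzCleanupSketch {

    public static class CleanupJob implements Job {
        @Override
        public void execute(JobExecutionContext context) {
            System.out.println("cleanup pass"); // stands in for the cleanup service call
        }
    }

    public static void main(String[] args) throws SchedulerException {
        Scheduler scheduler = StdSchedulerFactory.getDefaultScheduler();
        JobDetail job = JobBuilder.newJob(CleanupJob.class)
                .withIdentity("assetExtractionCleanup")
                .withDescription("Cleanup old asset extraction")
                .storeDurably()                             // setDurability(true)
                .build();
        Trigger trigger = TriggerBuilder.newTrigger()
                .forJob(job)
                .withSchedule(SimpleScheduleBuilder.simpleSchedule()
                        .withIntervalInMilliseconds(300000) // 5 minutes, as above
                        .repeatForever())                   // REPEAT_INDEFINITELY
                .build();
        scheduler.scheduleJob(job, trigger);
        scheduler.start();
    }
}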

19 View Complete Implementation : RemoteJob.java
Copyright Apache License 2.0
Author : apache
@DisallowConcurrentExecution
public class RemoteJob implements Job {

    private static final Logger log = getLogger(RemoteJob.class);

    public static final String CORE_KEY = "core";

    private Robot robot = null;

    private ScreenDimensions dim = null;

    public RemoteJob() {
        try {
            robot = new Robot();
            robot.setAutoDelay(5);
        } catch (AWTException e) {
            log.error("Unexpected error while creating Robot", e);
        }
    }

    @Override
    public void execute(JobExecutionContext context) throws JobExecutionException {
        JobDataMap data = context.getJobDetail().getJobDataMap();
        Core core = (Core) data.get(CORE_KEY);
        if (dim == null) {
            dim = core.getDim();
        }
        try {
            Map<String, Object> obj;
            while ((obj = core.getRemoteEvents().poll(1, TimeUnit.MILLISECONDS)) != null) {
                String action = String.valueOf(obj.get("action"));
                log.trace("Action polled:: {}, count: {}", action, core.getRemoteEvents().size());
                switch(action) {
                    case "mouseUp":
                        {
                            Point p = getCoordinates(obj);
                            robot.mouseMove(p.x, p.y);
                            robot.mouseRelease(InputEvent.BUTTON1_DOWN_MASK);
                        }
                        break;
                    case "mouseDown":
                        {
                            Point p = getCoordinates(obj);
                            robot.mouseMove(p.x, p.y);
                            robot.mousePress(InputEvent.BUTTON1_DOWN_MASK);
                        }
                        break;
                    case "mousePos":
                        {
                            Point p = getCoordinates(obj);
                            robot.mouseMove(p.x, p.y);
                        }
                        break;
                    case "keyDown":
                        new OmKeyEvent(obj).press(this);
                        break;
                    case "paste":
                        paste(String.valueOf(obj.get("paste")));
                        break;
                    case "copy":
                        {
                            String paste = getHighlightedText();
                            Map<Integer, String> map = new HashMap<>();
                            map.put(0, "copiedText");
                            map.put(1, paste);
                            String uid = String.valueOf(obj.get("uid"));
                            core.getInstance().invoke("sendMessageToClient", new Object[] { uid, map }, core);
                        }
                        break;
                }
            }
        } catch (Exception err) {
            log.error("[sendRemoteCursorEvent]", err);
        }
    }

    public void press(List<Integer> codes) {
        log.debug("sequence:: codes {}", codes);
        press(codes.stream().mapToInt(Integer::intValue).toArray());
    }

    public void press(int... codes) {
        for (int i = 0; i < codes.length; ++i) {
            robot.keyPress(codes[i]);
        }
        for (int i = codes.length - 1; i > -1; --i) {
            robot.keyRelease(codes[i]);
        }
    }

    private String getHighlightedText() {
        try {
            if (SystemUtils.IS_OS_MAC) {
                // macOS: simulate Cmd+C (copy)
                press(157, 67);
            } else {
                // pressing CTRL+C == copy
                press(KeyEvent.VK_CONTROL, KeyEvent.VK_C);
            }
            return getClipboardText();
        } catch (Exception e) {
            log.error("Unexpected exception while getting highlighted text", e);
        }
        return "";
    }

    public String getClipboardText() {
        try {
            // get the contents on the clipboard in a transferable object
            Transferable data = getDefaultToolkit().getSystemClipboard().getContents(null);
            // check if clipboard is empty
            if (data == null) {
            // Clipboard is empty!!!
            } else if (data.isDataFlavorSupported(stringFlavor)) {
                // see if DataFlavor of DataFlavor.stringFlavor is supported return text content
                return (String) data.getTransferData(stringFlavor);
            }
        } catch (Exception e) {
            log.error("Unexpected exception while getting clipboard text", e);
        }
        return "";
    }

    private void paste(String charValue) {
        Clipboard clippy = getDefaultToolkit().getSystemClipboard();
        try {
            Transferable transferableText = new StringSelection(charValue);
            clippy.setContents(transferableText, null);
            if (SystemUtils.IS_OS_MAC) {
                // macOS: simulate Cmd+V (paste)
                press(157, 86);
            } else {
                // pressing CTRL+V == insert-mode
                press(KeyEvent.VK_CONTROL, KeyEvent.VK_V);
            }
        } catch (Exception e) {
            log.error("Unexpected exception while pressSpecialSign", e);
        }
    }

    private Point getCoordinates(Map<String, Object> obj) {
        float scaleFactorX = ((float) dim.getSpinnerWidth()) / dim.getResizeX();
        float scaleFactorY = ((float) dim.getSpinnerHeight()) / dim.getResizeY();
        int x = Math.round(scaleFactorX * getFloat(obj, "x") + dim.getSpinnerX());
        int y = Math.round(scaleFactorY * getFloat(obj, "y") + dim.getSpinnerY());
        return new Point(x, y);
    }
}

19 View Complete Implementation : BranchNotificationMissingScreenshotsJob.java
Copyright Apache License 2.0
Author : box
/**
 * Send the screenshot missing notification for a branch if needed.
 *
 * @author jaurambault
 */
@Component
@DisallowConcurrentExecution
public class BranchNotificationMissingScreenshotsJob extends SchedulableJob {

    /**
     * logger
     */
    static Logger logger = LoggerFactory.getLogger(BranchNotificationMissingScreenshotsJob.class);

    static final String BRANCH_ID = "branchId";

    static final String SENDER_TYPE = "senderType";

    @Autowired
    BranchNotificationService branchNotificationService;

    @Override
    public void execute(JobExecutionContext context) throws JobExecutionException {
        Long branchId = context.getMergedJobDataMap().getLong(BRANCH_ID);
        String senderType = context.getMergedJobDataMap().getString(SENDER_TYPE);
        logger.debug("execute for branchId: {} and sender type: {}", branchId, senderType);
        branchNotificationService.sendMissingScreenshotNotificationForBranch(branchId, senderType);
    }

    @Override
    protected String getDescription() {
        return "Send the screenshot missing notification for a branch if applicable";
    }

    public void schedule(Long branchId, String senderType, Date triggerStartDate) {
        JobDataMap jobDataMap = new JobDataMap();
        jobDataMap.put(BRANCH_ID, branchId.toString());
        jobDataMap.put(SENDER_TYPE, senderType);
        schedule(jobDataMap, triggerStartDate, BRANCH_ID);
    }
}
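
The job above reads its parameters through getMergedJobDataMap, which merges the JobDetail's map with the firing trigger's map (trigger entries take precedence), so per-firing values like branchId can travel on the trigger. A self-contained sketch with made-up names:

import org.quartz.Job;
import org.quartz.JobBuilder;
import org.quartz.JobDataMap;
import org.quartz.JobDetail;
import org.quartz.JobExecutionContext;
import org.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.quartz.Trigger;
import org.quartz.TriggerBuilder;
import org.quartz.impl.StdSchedulerFactory;

public class MergedDataMapSketch {

    public static class EchoJob implements Job {
        @Override
        public void execute(JobExecutionContext context) {
            // merged view of job-level and trigger-level data
            JobDataMap merged = context.getMergedJobDataMap();
            System.out.println("branchId = " + merged.getString("branchId"));
        }
    }

    public static void main(String[] args) throws SchedulerException {
        Scheduler scheduler = StdSchedulerFactory.getDefaultScheduler();
        JobDetail job = JobBuilder.newJob(EchoJob.class).withIdentity("echo").storeDurably().build();
        JobDataMap perFiring = new JobDataMap();
        perFiring.put("branchId", "42"); // illustrative value carried on the trigger
        Trigger trigger = TriggerBuilder.newTrigger()
                .forJob(job)
                .usingJobData(perFiring)
                .startNow()
                .build();
        scheduler.addJob(job, false);
        scheduler.scheduleJob(trigger);
        scheduler.start();
    }
}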

19 View Complete Implementation : HrContractExclusionProcess.java
Copyright MIT License
Author : bcvsolutions
/**
 * HR process - identity's contract exclusion. The process is started for
 * contracts that are both valid (meaning validFrom and validTill) and excluded.
 *
 * "hrContractExclusion" can be configured as process workflow.
 *
 * @author Jan Helbich
 * @author Radek Tomiška
 * @since 7.5.1
 */
@Service
@Description("HR process - contract exclusion")
@DisallowConcurrentExecution
public class HrContractExclusionProcess extends AbstractHrProcess {

    @Autowired
    private IdmIdentityContractService identityContractService;

    @Autowired
    private IdentityContractExclusionProcessor identityContractExclusionProcessor;

    public HrContractExclusionProcess() {
    }

    public HrContractExclusionProcess(boolean skipAutomaticRoleRecalculation) {
        super(skipAutomaticRoleRecalculation);
    }

    @Override
    public boolean continueOnException() {
        return true;
    }

    @Override
    public boolean requireNewTransaction() {
        return true;
    }

    /**
     * {@inheritDoc}
     *
     * Find all identity contracts that are both valid and excluded.
     */
    @Override
    public Page<IdmIdentityContractDto> getItemsToProcess(Pageable pageable) {
        IdmIdentityContractFilter filter = new IdmIdentityContractFilter();
        filter.setValid(Boolean.TRUE);
        filter.setState(ContractState.EXCLUDED);
        return identityContractService.find(filter, pageable);
    }

    @Override
    public Optional<OperationResult> processItem(IdmIdentityContractDto dto) {
        if (!StringUtils.isEmpty(getWorkflowName())) {
            // wf is configured - execute wf instance
            return super.processItem(dto);
        }
        return Optional.of(identityContractExclusionProcessor.process(dto, isSkipAutomaticRoleRecalculation()));
    }
}

19 View Complete Implementation : PluralFormUpdaterJob.java
Copyright Apache License 2.0
Author : box
/**
 * This is to update all text units with missing plural form since the
 * introduction of the new plural form support.
 * <p>
 * Instead of a scheduler it could be called during asset extraction, but this
 * way it doesn't impact the standard workflow.
 * <p>
 * This task could be removed later when everything has been migrated.
 *
 * @author jaurambault
 */
@Profile("!disablescheduling")
@Configuration
@Component
@DisallowConcurrentExecution
@ConditionalOnProperty(value = "l10n.PluralFormUpdater", havingValue = "true")
public class PluralFormUpdaterJob implements Job {

    /**
     * logger
     */
    static Logger logger = LoggerFactory.getLogger(PluralFormUpdaterJob.class);

    @Autowired
    JdbcTemplate jdbcTemplate;

    @Autowired
    DBUtils dbUtils;

    @Override
    public void execute(JobExecutionContext context) throws JobExecutionException {
        if (dbUtils.isHSQL()) {
            logger.debug("Don't update (DB is HSQL)");
        } else {
            logger.debug("Update old text unit with plural form that are now avaible with new plural support");
            try {
                int updateCount = jdbcTemplate.update("" + "update tm_text_unit tu, (\n" + "    select tu.id as tu_id, atu.plural_form_id as plural_form_id, atu.plural_form_other as plural_form_other \n" + "    from tm_text_unit tu\n" + "    inner join asset_text_unit_to_tm_text_unit map on map.tm_text_unit_id = tu.id\n" + "    inner join asset_text_unit atu on map.asset_text_unit_id = atu.id\n" + "    where \n" + "        tu.plural_form_id is null and atu.plural_form_id is not null\n" + "    ) d\n" + "set tu.plural_form_id = d.plural_form_id, tu.plural_form_other =  d.plural_form_other " + "where tu.id = d.tu_id");
                logger.debug("TmTextUnit update count: {}", updateCount);
            } catch (Exception e) {
                logger.error("Couldn't update plural forms, ignore", e);
            }
        }
    }

    @Bean(name = "jobDetailPluralFromUpdater")
    JobDetailFactoryBean jobDetailPluralFromUpdater() {
        JobDetailFactoryBean jobDetailFactory = new JobDetailFactoryBean();
        jobDetailFactory.setJobClreplaced(PluralFormUpdaterJob.clreplaced);
        jobDetailFactory.setDescription("Update plural forms in text units");
        jobDetailFactory.setDurability(true);
        return jobDetailFactory;
    }

    @Bean
    SimpleTriggerFactoryBean triggerPluralFormUpdater(@Qualifier("jobDetailPluralFormUpdater") JobDetail job) {
        SimpleTriggerFactoryBean trigger = new SimpleTriggerFactoryBean();
        trigger.setJobDetail(job);
        trigger.setRepeatInterval(10000);
        trigger.setRepeatCount(SimpleTrigger.REPEAT_INDEFINITELY);
        return trigger;
    }
}
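
The update statement in execute is hard to read as concatenated string fragments. The same statement laid out in a Java text block (requires Java 15+, which the project may not target), with the semantics unchanged:

public class PluralFormSqlSketch {
    // identical statement to the one concatenated in execute() above
    static final String UPDATE_PLURAL_FORMS = """
            update tm_text_unit tu, (
                select tu.id as tu_id, atu.plural_form_id as plural_form_id, atu.plural_form_other as plural_form_other
                from tm_text_unit tu
                inner join asset_text_unit_to_tm_text_unit map on map.tm_text_unit_id = tu.id
                inner join asset_text_unit atu on map.asset_text_unit_id = atu.id
                where
                    tu.plural_form_id is null and atu.plural_form_id is not null
                ) d
            set tu.plural_form_id = d.plural_form_id, tu.plural_form_other = d.plural_form_other
            where tu.id = d.tu_id
            """;
}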

19 View Complete Implementation : JobInstance.java
Copyright Apache License 2.0
Author : apache
@PersistJobDataAfterExecution
@DisallowConcurrentExecution
public class JobInstance implements Job {

    private static final Logger LOGGER = LoggerFactory.getLogger(JobInstance.class);

    public static final String MEASURE_KEY = "measure";

    public static final String PREDICATES_KEY = "predicts";

    public static final String PREDICATE_JOB_NAME = "predicateJobName";

    private static final String TRIGGER_KEY = "trigger";

    static final String JOB_NAME = "jobName";

    static final String PATH_CONNECTOR_CHARACTER = ",";

    public static final String INTERVAL = "interval";

    public static final String REPEAT = "repeat";

    public static final String CHECK_DONEFILE_SCHEDULE = "checkdonefile.schedule";

    @Autowired
    @Qualifier("schedulerFactoryBean")
    private SchedulerFactoryBean factory;

    @Autowired
    private GriffinMeasureRepo measureRepo;

    @Autowired
    private JobRepo<AbstractJob> jobRepo;

    @Autowired
    private JobInstanceRepo instanceRepo;

    @Autowired
    private Environment env;

    private GriffinMeasure measure;

    private AbstractJob job;

    private List<SegmentPredicate> mPredicates;

    private Long jobStartTime;

    @Override
    @Transactional
    public void execute(JobExecutionContext context) {
        try {
            initParam(context);
            setSourcesPartitionsAndPredicates(measure.getDataSources());
            createJobInstance(job.getConfigMap());
        } catch (Exception e) {
            LOGGER.error("Create predicate job failure.", e);
        }
    }

    private void initParam(JobExecutionContext context) throws SchedulerException {
        mPredicates = new ArrayList<>();
        JobDetail jobDetail = context.getJobDetail();
        Long jobId = jobDetail.getJobDataMap().getLong(GRIFFIN_JOB_ID);
        job = jobRepo.findOne(jobId);
        Long measureId = job.getMeasureId();
        measure = measureRepo.findOne(measureId);
        setJobStartTime(jobDetail);
        if (job.getConfigMap() == null) {
            job.setConfigMap(new HashMap<>());
        }
        job.getConfigMap().put(TRIGGER_KEY, context.getTrigger().getKey().toString());
    }

    @SuppressWarnings("unchecked")
    private void setJobStartTime(JobDetail jobDetail) throws SchedulerException {
        Scheduler scheduler = factory.getScheduler();
        JobKey jobKey = jobDetail.getKey();
        List<Trigger> triggers = (List<Trigger>) scheduler.getTriggersOfJob(jobKey);
        Date triggerTime = triggers.get(0).getPreviousFireTime();
        jobStartTime = triggerTime.getTime();
    }

    private void setSourcesPartitionsAndPredicates(List<DataSource> sources) throws Exception {
        boolean isFirstBaseline = true;
        for (JobDataSegment jds : job.getSegments()) {
            if (jds.isAsTsBaseline() && isFirstBaseline) {
                Long tsOffset = TimeUtil.str2Long(jds.getSegmentRange().getBegin());
                measure.setTimestamp(jobStartTime + tsOffset);
                isFirstBaseline = false;
            }
            for (DataSource ds : sources) {
                setDataSourcePartitions(jds, ds);
            }
        }
    }

    private void setDataSourcePartitions(JobDataSegment jds, DataSource ds) throws Exception {
        List<DataConnector> connectors = ds.getConnectors();
        for (DataConnector dc : connectors) {
            setDataConnectorPartitions(jds, dc);
        }
    }

    private void setDataConnectorPartitions(JobDataSegment jds, DataConnector dc) throws Exception {
        String dcName = jds.getDataConnectorName();
        if (dcName.equals(dc.getName())) {
            Long[] sampleTs = genSampleTs(jds.getSegmentRange(), dc);
            setConnectorConf(dc, sampleTs);
            setConnectorPredicates(dc, sampleTs);
        }
    }

    /**
     * split data into several parts and get each part's start timestamp
     *
     * @param segRange config of data
     * @param dc       data connector
     * @return split timestamps of data
     */
    private Long[] genSampleTs(SegmentRange segRange, DataConnector dc) {
        Long offset = TimeUtil.str2Long(segRange.getBegin());
        Long range = TimeUtil.str2Long(segRange.getLength());
        String unit = dc.getDataUnit();
        Long dataUnit = TimeUtil.str2Long(StringUtils.isEmpty(unit) ? dc.getDefaultDataUnit() : unit);
        // offset usually is negative
        Long dataStartTime = jobStartTime + offset;
        if (range < 0) {
            dataStartTime += range;
            range = Math.abs(range);
        }
        if (Math.abs(dataUnit) >= range || dataUnit == 0) {
            return new Long[] { dataStartTime };
        }
        int count = (int) (range / dataUnit);
        Long[] timestamps = new Long[count];
        for (int index = 0; index < count; index++) {
            timestamps[index] = dataStartTime + index * dataUnit;
        }
        return timestamps;
    }

    /**
     * set data connector predicates
     *
     * @param dc       data connector
     * @param sampleTs collection of data split start timestamp
     */
    private void setConnectorPredicates(DataConnector dc, Long[] sampleTs) {
        List<SegmentPredicate> predicates = dc.getPredicates();
        for (SegmentPredicate predicate : predicates) {
            genConfMap(predicate.getConfigMap(), sampleTs, dc.getDataTimeZone());
            // Do not forget to update the original string config
            predicate.setConfigMap(predicate.getConfigMap());
            mPredicates.add(predicate);
        }
    }

    private void setConnectorConf(DataConnector dc, Long[] sampleTs) {
        genConfMap(dc.getConfigMap(), sampleTs, dc.getDataTimeZone());
        dc.setConfigMap(dc.getConfigMap());
    }

    /**
     * @param conf     config map
     * @param sampleTs collection of data split start timestamp
     * @return all config data combined, like {"where": "year=2017 AND month=11
     * AND dt=15 AND hour=09,year=2017 AND month=11 AND
     * dt=15 AND hour=10"}
     * or like {"path": "/year=2017/month=11/dt=15/hour=09/_DONE
     * ,/year=2017/month=11/dt=15/hour=10/_DONE"}
     */
    private void genConfMap(Map<String, Object> conf, Long[] sampleTs, String timezone) {
        if (conf == null) {
            LOGGER.warn("Predicate config is null.");
            return;
        }
        for (Map.Entry<String, Object> entry : conf.entrySet()) {
            // in case entry value is a json object instead of a string
            if (entry.getValue() instanceof String) {
                String value = (String) entry.getValue();
                Set<String> set = new HashSet<>();
                if (StringUtils.isEmpty(value)) {
                    continue;
                }
                for (Long timestamp : sampleTs) {
                    set.add(TimeUtil.format(value, timestamp, TimeUtil.getTimeZone(timezone)));
                }
                conf.put(entry.getKey(), StringUtils.join(set, PATH_CONNECTOR_CHARACTER));
            }
        }
    }

    @SuppressWarnings("unchecked")
    private void createJobInstance(Map<String, Object> confMap) throws Exception {
        confMap = checkConfMap(confMap != null ? confMap : new HashMap<>());
        Map<String, Object> config = (Map<String, Object>) confMap.get(CHECK_DONEFILE_SCHEDULE);
        Long interval = TimeUtil.str2Long((String) config.get(INTERVAL));
        Integer repeat = Integer.valueOf(config.get(REPEAT).toString());
        String groupName = "PG";
        String jobName = job.getJobName() + "_predicate_" + System.currentTimeMillis();
        TriggerKey tk = triggerKey(jobName, groupName);
        if (factory.getScheduler().checkExists(tk)) {
            throw new GriffinException.ConflictException(QUARTZ_JOB_ALREADY_EXIST);
        }
        String triggerKey = (String) confMap.get(TRIGGER_KEY);
        saveJobInstance(jobName, groupName, triggerKey);
        createJobInstance(tk, interval, repeat, jobName);
    }

    @SuppressWarnings("unchecked")
    Map<String, Object> checkConfMap(Map<String, Object> confMap) {
        Map<String, Object> config = (Map<String, Object>) confMap.get(CHECK_DONEFILE_SCHEDULE);
        String interval = env.getProperty("predicate.job.interval");
        interval = interval != null ? interval : "5m";
        String repeat = env.getProperty("predicate.job.repeat.count");
        repeat = repeat != null ? repeat : "12";
        if (config == null) {
            Map<String, Object> map = new HashMap<>();
            map.put(INTERVAL, interval);
            map.put(REPEAT, repeat);
            confMap.put(CHECK_DONEFILE_SCHEDULE, map);
        } else {
            // override defaults only when interval or repeat is present in the config
            Object confInterval = config.get(INTERVAL);
            Object confRepeat = config.get(REPEAT);
            interval = confInterval != null ? confInterval.toString() : interval;
            repeat = confRepeat != null ? confRepeat.toString() : repeat;
            config.put(INTERVAL, interval);
            config.put(REPEAT, repeat);
        }
        return confMap;
    }

    private void saveJobInstance(String pName, String pGroup, String triggerKey) {
        ProcessType type = measure.getProcessType() == BATCH ? BATCH : STREAMING;
        Long tms = System.currentTimeMillis();
        String expired = env.getProperty("jobInstance.expired.milliseconds");
        Long expireTms = Long.valueOf(expired != null ? expired : "604800000") + tms;
        JobInstanceBean instance = new JobInstanceBean(FINDING, pName, pGroup, tms, expireTms, type);
        instance.setJob(job);
        instance.setTriggerKey(triggerKey);
        instanceRepo.save(instance);
    }

    private void createJobInstance(TriggerKey tk, Long interval, Integer repeatCount, String pJobName) throws Exception {
        JobDetail jobDetail = addJobDetail(tk, pJobName);
        Trigger trigger = genTriggerInstance(tk, jobDetail, interval, repeatCount);
        factory.getScheduler().scheduleJob(trigger);
    }

    private Trigger genTriggerInstance(TriggerKey tk, JobDetail jd, Long interval, Integer repeatCount) {
        return newTrigger().withIdenreplacedy(tk).forJob(jd).startNow().withSchedule(simpleSchedule().withIntervalInMilliseconds(interval).withRepeatCount(repeatCount)).build();
    }

    private JobDetail addJobDetail(TriggerKey tk, String pJobName) throws SchedulerException, IOException {
        Scheduler scheduler = factory.getScheduler();
        JobKey jobKey = jobKey(tk.getName(), tk.getGroup());
        JobDetail jobDetail;
        Boolean isJobKeyExist = scheduler.checkExists(jobKey);
        if (isJobKeyExist) {
            jobDetail = scheduler.getJobDetail(jobKey);
        } else {
            jobDetail = newJob(SparkSubmitJob.clreplaced).storeDurably().withIdenreplacedy(jobKey).build();
        }
        setJobDataMap(jobDetail, pJobName);
        scheduler.addJob(jobDetail, isJobKeyExist);
        return jobDetail;
    }

    private void setJobDataMap(JobDetail jobDetail, String pJobName) throws IOException {
        JobDataMap dataMap = jobDetail.getJobDataMap();
        preProcessMeasure();
        String result = toJson(measure);
        dataMap.put(MEASURE_KEY, result);
        dataMap.put(PREDICATES_KEY, toJson(mPredicates));
        dataMap.put(JOB_NAME, job.getJobName());
        dataMap.put(PREDICATE_JOB_NAME, pJobName);
    }

    private void preProcessMeasure() throws IOException {
        for (DataSource source : measure.getDataSources()) {
            Map cacheMap = source.getCheckpointMap();
            // to skip batch job
            if (cacheMap == null) {
                return;
            }
            String cache = toJson(cacheMap);
            cache = cache.replaceAll("\\$\\{JOB_NAME}", job.getJobName());
            cache = cache.replaceAll("\\$\\{SOURCE_NAME}", source.getName());
            cache = cache.replaceAll("\\$\\{TARGET_NAME}", source.getName());
            cacheMap = toEntity(cache, Map.class);
            source.setCheckpointMap(cacheMap);
        }
    }
}
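
A note on the genConfMap helper above: it expands each timestamp-templated config value once per sample timestamp and joins the results with PATH_CONNECTOR_CHARACTER (a comma); because the expansions are collected into a HashSet, duplicates collapse and ordering is not guaranteed. A rough, self-contained illustration of that expansion (a minimal sketch: formatTemplate is a hypothetical stand-in for Griffin's TimeUtil.format that simply treats the template as a SimpleDateFormat pattern; Griffin's real token syntax differs):

import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.TimeZone;

public class ConfExpansionSketch {

    // Hypothetical stand-in for TimeUtil.format: the whole template is treated
    // as a SimpleDateFormat pattern with quoted literal sections.
    static String formatTemplate(String template, long timestamp, TimeZone tz) {
        SimpleDateFormat sdf = new SimpleDateFormat(template);
        sdf.setTimeZone(tz);
        return sdf.format(new Date(timestamp));
    }

    public static void main(String[] args) {
        long hour = 60 * 60 * 1000L;
        long start = System.currentTimeMillis() - 2 * hour;
        List<String> parts = new ArrayList<>();
        for (int i = 0; i < 2; i++) {
            parts.add(formatTemplate("'/year='yyyy'/month='MM'/dt='dd'/hour='HH'/_DONE'", start + i * hour, TimeZone.getTimeZone("UTC")));
        }
        // The comma plays the role of PATH_CONNECTOR_CHARACTER when joining
        System.out.println(String.join(",", parts));
    }
}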

19 View Complete Implementation : RepositoryStatisticsCronJob.java
Copyright Apache License 2.0
Author : box
/**
 * Update {@link RepositoryStatistic}s on a regular basis.
 * <p>
 * This is required to re-compute OOSLA information. It will also be useful
 * when adding new statistics, to have them recomputed automatically. Previously,
 * we had to wait for a change in the repository.
 *
 * @author jaurambault
 */
@Profile("!disablescheduling")
@ConditionalOnProperty(value = "l10n.repositoryStatistics.scheduler.cron")
@Configuration
@Component
@DisallowConcurrentExecution
public class RepositoryStatisticsCronJob implements Job {

    /**
     * logger
     */
    static Logger logger = LoggerFactory.getLogger(RepositoryStatisticsCronJob.class);

    @Autowired
    RepositoryRepository repositoryRepository;

    @Lazy
    @Autowired
    RepositoryStatisticsJob repositoryStatisticsJob;

    @Value("${l10n.repositoryStatistics.scheduler.cron}")
    String cron;

    @Override
    public void execute(JobExecutionContext context) throws JobExecutionException {
        logger.debug("Sets repository stats as out of date");
        List<Repository> repositories = repositoryRepository.findByDeletedFalseOrderByNameAsc();
        for (Repository repository : repositories) {
            repositoryStatisticsJob.schedule(repository.getId());
        }
    }

    @Bean(name = "repositoryStatisticsCron")
    public JobDetailFactoryBean jobDetailRepositoryStatisticsCron() {
        JobDetailFactoryBean jobDetailFactory = new JobDetailFactoryBean();
        jobDetailFactory.setJobClass(RepositoryStatisticsCronJob.class);
        jobDetailFactory.setDescription("Mark repository as out of date to later recompute stats");
        jobDetailFactory.setDurability(true);
        return jobDetailFactory;
    }

    @Bean
    public CronTriggerFactoryBean triggerRepositoryStatisticsCron(@Qualifier("repositoryStatisticsCron") JobDetail job) {
        CronTriggerFactoryBean trigger = new CronTriggerFactoryBean();
        trigger.setCronExpression(cron);
        trigger.setJobDetail(job);
        return trigger;
    }
}
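
The cron job above only declares the JobDetail and Trigger beans; the excerpt does not show how they reach a Quartz scheduler. With Spring's spring-context-support, a SchedulerFactoryBean typically collects them, roughly like this (a minimal sketch, not taken from the project above):

import org.quartz.JobDetail;
import org.quartz.Trigger;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.quartz.SchedulerFactoryBean;

@Configuration
public class QuartzSchedulerConfig {

    // Collects the JobDetail and Trigger beans declared elsewhere (e.g. the
    // cron job above) and registers them with an embedded Quartz scheduler.
    @Bean
    public SchedulerFactoryBean quartzScheduler(JobDetail jobDetail, Trigger trigger) {
        SchedulerFactoryBean factory = new SchedulerFactoryBean();
        factory.setJobDetails(jobDetail);
        factory.setTriggers(trigger);
        // wait for running jobs to finish on shutdown
        factory.setWaitForJobsToCompleteOnShutdown(true);
        return factory;
    }
}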

19 View Complete Implementation : IdentityContractExpirationTaskExecutor.java
Copyright MIT License
Author : bcvsolutions
/**
 * Remove roles by expired identity contracts (=> removes assigned roles).
 *
 * @author Radek Tomiška
 */
@Service
@DisallowConcurrentExecution
@Description("Remove roles by expired identity contracts (=> removes assigned roles).")
public class IdentityContractExpirationTaskExecutor extends AbstractSchedulableStatefulExecutor<IdmIdentityContractDto> {

    private static final org.slf4j.Logger LOG = org.slf4j.LoggerFactory.getLogger(IdentityContractExpirationTaskExecutor.class);

    // 
    @Autowired
    private IdmIdentityContractService identityContractService;

    @Autowired
    private IdmIdentityRoleService identityRoleService;

    // 
    private LocalDate expiration;

    @Override
    public void init(Map<String, Object> properties) {
        super.init(properties);
        // 
        expiration = LocalDate.now();
        LOG.debug("Removing roles for expired identity contracts was initialized for expiration less than [{}]", expiration);
    }

    @Override
    public Page<IdmIdentityContractDto> getItemsToProcess(Pageable pageable) {
        return identityContractService.findExpiredContracts(expiration, pageable);
    }

    @Override
    public boolean continueOnException() {
        return true;
    }

    @Override
    public boolean requireNewTransaction() {
        return true;
    }

    @Override
    public Optional<OperationResult> processItem(IdmIdentityContractDto dto) {
        LOG.info("Remove roles by expired identity contract [{}]. Contract ended for expiration less than [{}]", dto.getId(), expiration);
        try {
            IdmIdentityRoleFilter filter = new IdmIdentityRoleFilter();
            filter.setIdentityContractId(dto.getId());
            filter.setDirectRole(Boolean.TRUE);
            // remove all referenced roles
            identityRoleService.find(filter, null).forEach(identityRole -> {
                identityRoleService.delete(identityRole);
            });
            return Optional.of(new OperationResult.Builder(OperationState.EXECUTED).build());
        } catch (Exception ex) {
            LOG.error("Removing roles of expired contract [{}] failed", dto.getId(), ex);
            return Optional.of(new OperationResult.Builder(OperationState.EXCEPTION).setCause(ex).build());
        }
    }
}

19 View Complete Implementation : SampleCronJob.java
Copyright MIT License
Author : ChamithKodikara
/**
 * @author Chamith
 */
@Slf4j
@DisallowConcurrentExecution
public class SampleCronJob extends QuartzJobBean {

    @Override
    protected void executeInternal(JobExecutionContext context) throws JobExecutionException {
        log.info("SampleCronJob Start................");
        IntStream.range(0, 10).forEach(i -> {
            log.info("Counting - {}", i);
            try {
                Thread.sleep(1000);
            } catch (InterruptedException e) {
                log.error(e.getMessage(), e);
            }
        });
        log.info("SampleCronJob End................");
    }
}
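
Since SampleCronJob sleeps roughly ten seconds per run, it is a convenient way to observe @DisallowConcurrentExecution: if the trigger fires faster than a run completes, Quartz delays the next execution for that JobKey instead of overlapping it. A minimal standalone sketch that schedules such a job every five seconds (the scheduler wiring and the cron expression are assumptions for the demo, not part of the example above):

import org.quartz.CronScheduleBuilder;
import org.quartz.JobBuilder;
import org.quartz.JobDetail;
import org.quartz.Scheduler;
import org.quartz.Trigger;
import org.quartz.TriggerBuilder;
import org.quartz.impl.StdSchedulerFactory;

public class DisallowConcurrentDemo {

    public static void main(String[] args) throws Exception {
        Scheduler scheduler = StdSchedulerFactory.getDefaultScheduler();

        JobDetail job = JobBuilder.newJob(SampleCronJob.class)
                .withIdentity("sampleCronJob", "demo")
                .build();

        // Fires every 5 seconds, but the job runs ~10 seconds; because the job
        // class is annotated with @DisallowConcurrentExecution, Quartz queues
        // the next run instead of starting a second instance concurrently.
        Trigger trigger = TriggerBuilder.newTrigger()
                .withIdentity("sampleCronTrigger", "demo")
                .withSchedule(CronScheduleBuilder.cronSchedule("0/5 * * * * ?"))
                .build();

        scheduler.scheduleJob(job, trigger);
        scheduler.start();
    }
}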

19 View Complete Implementation : AccountProtectionExpirationTaskExecutor.java
Copyright MIT License
Author : bcvsolutions
/**
 * Long running task for remove accounts with expired protection.
 * Expected usage is in cooperation with CronTaskTrigger, running
 * once a day after midnight.
 */
@Service
@DisallowConcurrentExecution
@Description("Removes accounts with expired protection.")
public class AccountProtectionExpirationTaskExecutor extends AbstractSchedulableTaskExecutor<Boolean> {

    private static final Logger LOG = LoggerFactory.getLogger(AccountProtectionExpirationTaskExecutor.class);

    private static final String PARAMETER_EXPIRATION = "expiration";

    // 
    @Autowired
    private AccAccountService service;

    // 
    private ZonedDateTime expiration;

    @Override
    protected boolean start() {
        expiration = ZonedDateTime.now();
        LOG.debug("Start: Remove accounts with expired protection for expiration less than [{}]", expiration);
        // 
        return super.start();
    }

    @Override
    public Boolean process() {
        this.counter = 0L;
        boolean canContinue = true;
        while (canContinue) {
            Page<AccAccountDto> expiredAccounts = service.findExpired(expiration, PageRequest.of(0, 100));
            // init count
            if (count == null) {
                count = expiredAccounts.getTotalElements();
            }
            // 
            for (AccAccountDto account : expiredAccounts) {
                service.delete(account);
                counter++;
                canContinue = updateState();
                if (!canContinue) {
                    break;
                }
            }
            if (!expiredAccounts.hasNext()) {
                break;
            }
        }
        LOG.info("End: Remove accounts with expired protection for expiration less than [{}]", expiration);
        return Boolean.TRUE;
    }

    @Override
    public Map<String, Object> getProperties() {
        Map<String, Object> properties = super.getProperties();
        properties.put(PARAMETER_EXPIRATION, expiration);
        return properties;
    }
}
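
Note the loop shape in process() above: it always re-reads the first page of expired accounts, because each deletion removes rows from the result set, so re-reading page 0 eventually drains it. A self-contained sketch of that drain-by-first-page pattern (a plain list stands in for the repository; all names are hypothetical):

import java.util.ArrayList;
import java.util.List;

public class DrainFirstPageSketch {

    static final int PAGE_SIZE = 100;

    public static void main(String[] args) {
        List<String> expired = new ArrayList<>();
        for (int i = 0; i < 250; i++) {
            expired.add("account-" + i);
        }
        // Always re-read "page 0": each deletion shrinks the result set,
        // so the first page eventually drains the whole backlog.
        while (!expired.isEmpty()) {
            List<String> page = new ArrayList<>(expired.subList(0, Math.min(PAGE_SIZE, expired.size())));
            for (String account : page) {
                expired.remove(account); // stands in for service.delete(account)
            }
            System.out.println("processed a page, remaining: " + expired.size());
        }
    }
}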

19 View Complete Implementation : RefreshFastTable.java
Copyright GNU General Public License v3.0
Author : dessalines
/**
 * Created by tyler on 6/15/17.
 */
@DisallowConcurrentExecution
public class RefreshFastTable implements Job {

    public static Logger log = (Logger) LoggerFactory.getLogger(RefreshFastTable.class);

    private void refreshView() {
        Tools.dbInit();
        log.debug("Refreshing view...");
        String sql = "refresh materialized view concurrently file_view;";
        // try-with-resources closes the statement even if the update fails;
        // the connection lifecycle is handled by Tools.dbInit()/Tools.dbClose()
        try (Statement stmt = new DB("default").connection().createStatement()) {
            stmt.executeUpdate(sql);
        } catch (SQLException e) {
            e.printStackTrace();
        }
        Tools.dbClose();
        log.debug("Done refreshing view.");
    }

    @Override
    public void execute(JobExecutionContext arg0) throws JobExecutionException {
        refreshView();
    }
}

19 View Complete Implementation : ModelMapperCheckerTaskExecutor.java
Copyright MIT License
Author : bcvsolutions
/**
 * Check model mapper is properly initialized to prevent:
 * org.modelmapper.MappingException: ModelMapper mapping errors: Converter org.modelmapper.internal.converter.CollectionConverter@7214dbf8 failed to convert
 *
 * @author Radek Tomiška
 * @since 9.7.10
 */
@Component
@DisallowConcurrentExecution
@Description("Check model mapper is properly initialized to prevent dto mapping exceptions.")
public class ModelMapperCheckerTaskExecutor extends AbstractSchedulableTaskExecutor<Boolean> {

    @Autowired
    private ModelMapperChecker modelMapperChecker;

    @Override
    public Boolean process() {
        modelMapperChecker.verify();
        // 
        return Boolean.TRUE;
    }
}

19 View Complete Implementation : NoConurrentBaseJob.java
Copyright MIT License
Author : enilu
@Component
@DisallowConcurrentExecution
public class NoConurrentBaseJob extends BaseJob {
}

19 View Complete Implementation : AuroraCronJob.java
Copyright Apache License 2.0
Author : apache
/**
 * Encapsulates the logic behind a single trigger of a single job key. Multiple executions may run
 * concurrently but only a single instance will be active at a time per job key.
 *
 * <p>
 * Executions may block for long periods of time when waiting for a kill to complete. The Quartz
 * scheduler should therefore be configured with a large number of threads.
 */
@DisallowConcurrentExecution
@PersistJobDataAfterExecution
class AuroraCronJob implements Job, EventSubscriber {

    private static final Logger LOG = LoggerFactory.getLogger(AuroraCronJob.class);

    private static final AtomicLong CRON_JOB_TRIGGERS = Stats.exportLong("cron_job_triggers");

    private static final AtomicLong CRON_JOB_MISFIRES = Stats.exportLong("cron_job_misfires");

    private static final AtomicLong CRON_JOB_PARSE_FAILURES = Stats.exportLong("cron_job_parse_failures");

    private static final AtomicLong CRON_JOB_COLLISIONS = Stats.exportLong("cron_job_collisions");

    private static final AtomicLong CRON_JOB_CONCURRENT_RUNS = Stats.exportLong("cron_job_concurrent_runs");

    @VisibleForTesting
    static final Optional<String> KILL_AUDIT_MESSAGE = Optional.of("Killed by cronScheduler");

    private final StateManager stateManager;

    private final BackoffHelper delayedStartBackoff;

    private final BatchWorker<NoResult> batchWorker;

    private final Set<IJobKey> killFollowups = Sets.newConcurrentHashSet();

    /**
     * Annotation for the max cron batch size.
     */
    @VisibleForTesting
    @Qualifier
    @Target({ FIELD, PARAMETER, METHOD })
    @Retention(RUNTIME)
    @interface CronMaxBatchSize {
    }

    static class CronBatchWorker extends BatchWorker<NoResult> {

        @Inject
        CronBatchWorker(Storage storage, StatsProvider statsProvider, @CronMaxBatchSize int maxBatchSize) {
            super(storage, statsProvider, maxBatchSize);
        }

        @Override
        protected String serviceName() {
            return "CronBatchWorker";
        }
    }

    @Inject
    AuroraCronJob(Config config, StateManager stateManager, CronBatchWorker batchWorker) {
        this.stateManager = requireNonNull(stateManager);
        this.batchWorker = requireNonNull(batchWorker);
        this.delayedStartBackoff = requireNonNull(config.getDelayedStartBackoff());
    }

    @Override
    public void execute(JobExecutionContext context) throws JobExecutionException {
        // We assume Quartz prevents concurrent runs of this job for a given job key. This allows us
        // to avoid races where we might kill another run's tasks.
        checkState(context.getJobDetail().isConcurrentExectionDisallowed());
        doExecute(context);
    }

    @VisibleForTesting
    void doExecute(JobExecutionContext context) throws JobExecutionException {
        final IJobKey key = Quartz.auroraJobKey(context.getJobDetail().getKey());
        final String path = JobKeys.canonicalString(key);
        // Prevent a concurrent run for this job in case a previous trigger took longer to run.
        // This approach relies on saving the "work in progress" token within the job context itself
        // (see below) and relying on killFollowups to signal "work completion".
        if (context.getJobDetail().getJobDataMap().containsKey(path)) {
            CRON_JOB_CONCURRENT_RUNS.incrementAndGet();
            if (killFollowups.contains(key)) {
                context.getJobDetail().getJobDataMap().remove(path);
                killFollowups.remove(key);
                LOG.info("Resetting job context for cron {}", path);
            } else {
                LOG.info("Ignoring trigger as another concurrent run is active for cron {}", path);
                return;
            }
        }
        CompletableFuture<NoResult> scheduleResult = batchWorker.<NoResult>execute(storeProvider -> {
            Optional<IJobConfiguration> config = storeProvider.getCronJobStore().fetchJob(key);
            if (!config.isPresent()) {
                LOG.warn("Cron was triggered for {} but no job with that key was found in storage.", path);
                CRON_JOB_MISFIRES.incrementAndGet();
                return BatchWorker.NO_RESULT;
            }
            SanitizedCronJob cronJob;
            try {
                cronJob = SanitizedCronJob.from(new SanitizedConfiguration(config.get()));
            } catch (CronException e) {
                LOG.warn("Invalid cron job for {} in storage - failed to parse", key, e);
                CRON_JOB_PARSE_FAILURES.incrementAndGet();
                return BatchWorker.NO_RESULT;
            }
            CronCollisionPolicy collisionPolicy = cronJob.getCronCollisionPolicy();
            LOG.info("Cron triggered for {} at {} with policy {}", path, new Date(), collisionPolicy);
            CRON_JOB_TRIGGERS.incrementAndGet();
            final Query.Builder activeQuery = Query.jobScoped(key).active();
            Set<String> activeTasks = Tasks.ids(storeProvider.getTaskStore().fetchTasks(activeQuery));
            ITaskConfig task = cronJob.getSanitizedConfig().getJobConfig().getTaskConfig();
            Set<Integer> instanceIds = cronJob.getSanitizedConfig().getInstanceIds();
            if (activeTasks.isEmpty()) {
                stateManager.insertPendingTasks(storeProvider, task, instanceIds);
                return BatchWorker.NO_RESULT;
            }
            CRON_JOB_COLLISIONS.incrementAndGet();
            switch(collisionPolicy) {
                case KILL_EXISTING:
                    for (String taskId : activeTasks) {
                        stateManager.changeState(storeProvider, taskId, Optional.empty(), KILLING, KILL_AUDIT_MESSAGE);
                    }
                    LOG.info("Waiting for job to terminate before launching cron job " + path);
                    // Use job detail map to signal a "work in progress" condition to subsequent triggers.
                    context.getJobDetail().getJobDataMap().put(path, null);
                    batchWorker.executeWithReplay(delayedStartBackoff.getBackoffStrategy(), store -> {
                        Query.Builder query = Query.taskScoped(activeTasks).active();
                        if (Iterables.isEmpty(storeProvider.getTaskStore().fetchTasks(query))) {
                            LOG.info("Initiating delayed launch of cron " + path);
                            stateManager.insertPendingTasks(store, task, instanceIds);
                            return new BatchWorker.Result<>(true, null);
                        } else {
                            LOG.info("Not yet safe to run cron " + path);
                            return new BatchWorker.Result<>(false, null);
                        }
                    }).thenAccept(ignored -> {
                        killFollowups.add(key);
                        LOG.info("Finished delayed launch for cron " + path);
                    });
                    break;
                case RUN_OVERLAP:
                    LOG.error("Ignoring trigger for job {} with deprecated collision" + "policy RUN_OVERLAP due to unterminated active tasks.", path);
                    break;
                case CANCEL_NEW:
                    break;
                default:
                    LOG.error("Unrecognized cron collision policy: " + collisionPolicy);
            }
            return BatchWorker.NO_RESULT;
        });
        try {
            scheduleResult.get();
        } catch (ExecutionException | InterruptedException e) {
            LOG.warn("Interrupted while trying to launch cron " + path, e);
            Thread.currentThread().interrupt();
            throw new JobExecutionException(e);
        }
    }

    static class Config {

        private final BackoffHelper delayedStartBackoff;

        Config(BackoffHelper delayedStartBackoff) {
            this.delayedStartBackoff = requireNonNull(delayedStartBackoff);
        }

        public BackoffHelper getDelayedStartBackoff() {
            return delayedStartBackoff;
        }
    }
}
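
The checkState call in execute() relies on JobDetail#isConcurrentExectionDisallowed (the misspelling is in Quartz's own API), which reports whether the job class carries @DisallowConcurrentExecution. The same defensive check can be used in any job whose logic depends on serialized runs, as in this minimal sketch (GuardedJob is hypothetical, not from the projects above):

import org.quartz.DisallowConcurrentExecution;
import org.quartz.Job;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;

@DisallowConcurrentExecution
public class GuardedJob implements Job {

    @Override
    public void execute(JobExecutionContext context) throws JobExecutionException {
        // Fail fast if someone removes the annotation (or schedules a job
        // class without it) while the logic still relies on serialized runs.
        if (!context.getJobDetail().isConcurrentExectionDisallowed()) {
            throw new JobExecutionException("GuardedJob requires @DisallowConcurrentExecution");
        }
        // ... job logic that assumes no concurrent run per job key ...
    }
}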

19 View Complete Implementation : CleanupDestroyedBusinessObjectDataJob.java
Copyright Apache License 2.0
Author : FINRAOS
/**
 * The system job that will cleanup destroyed business object data.
 */
@Component(CleanupDestroyedBusinessObjectDataJob.JOB_NAME)
@DisallowConcurrentExecution
public class CleanupDestroyedBusinessObjectDataJob extends AbstractSystemJob {

    private static final Logger LOGGER = LoggerFactory.getLogger(CleanupDestroyedBusinessObjectDataJob.class);

    public static final String JOB_NAME = "cleanupDestroyedBusinessObjectData";

    @Autowired
    private CleanupDestroyedBusinessObjectDataService cleanupDeletedBusinessObjectDataService;

    @Autowired
    private BusinessObjectDataHelper businessObjectDataHelper;

    @Autowired
    private JsonHelper jsonHelper;

    @Autowired
    private ParameterHelper parameterHelper;

    @Override
    protected void executeInternal(JobExecutionContext context) throws JobExecutionException {
        // Log that the system job is started.
        LOGGER.info("Started system job. systemJobName=\"{}\"", JOB_NAME);
        // Get the parameter values.
        int maxBusinessObjectDataInstancesToProcess = parameterHelper.getParameterValueAsInteger(parameters, ConfigurationValue.CLEANUP_DESTROYED_BDATA_JOB_MAX_BDATA_INSTANCES);
        // Log the parameter values.
        LOGGER.info("systemJobName=\"{}\" {}={}", JOB_NAME, ConfigurationValue.CLEANUP_DESTROYED_BDATA_JOB_MAX_BDATA_INSTANCES, maxBusinessObjectDataInstancesToProcess);
        // Continue the processing only if the maximum number of business object data instances
        // that is allowed to be processed in a single run of this system job is greater than zero.
        int processedBusinessObjectDataInstances = 0;
        if (maxBusinessObjectDataInstancesToProcess > 0) {
            // Select destroyed business object data storage units that are ready for cleanup.
            List<BusinessObjectDataStorageUnitKey> businessObjectDataStorageUnitKeys = cleanupDeletedBusinessObjectDataService.getS3StorageUnitsToCleanup(maxBusinessObjectDataInstancesToProcess);
            // Log the number of storage units selected for processing.
            LOGGER.info("Selected for processing S3 storage units. systemJobName=\"{}\" storageUnitCount={}", JOB_NAME, businessObjectDataStorageUnitKeys.size());
            // Try to clean up each of the selected storage units.
            for (BusinessObjectDataStorageUnitKey businessObjectDataStorageUnitKey : businessObjectDataStorageUnitKeys) {
                try {
                    cleanupDeletedBusinessObjectDataService.cleanupS3StorageUnit(businessObjectDataStorageUnitKey);
                    processedBusinessObjectDataInstances += 1;
                } catch (RuntimeException runtimeException) {
                    // Log the exception.
                    LOGGER.error("Failed to cleanup a destroyed business object data. systemJobName=\"{}\" storageName=\"{}\" businessObjectDataKey={}", JOB_NAME, businessObjectDataStorageUnitKey.getStorageName(), jsonHelper.objectToJson(businessObjectDataHelper.createBusinessObjectDataKeyFromStorageUnitKey(businessObjectDataStorageUnitKey)), runtimeException);
                }
            }
        }
        // Log the number of cleanup destroyed business object data instances.
        LOGGER.info("Cleanup destroyed business object data instances. systemJobName=\"{}\" businessObjectDataCount={}", JOB_NAME, processedBusinessObjectDataInstances);
        // Log that the system job is ended.
        LOGGER.info("Completed system job. systemJobName=\"{}\"", JOB_NAME);
    }

    @Override
    public void validateParameters(List<Parameter> parameters) {
        // This system job accepts only one optional parameter with an integer value.
        if (!CollectionUtils.isEmpty(parameters)) {
            Assert.isTrue(parameters.size() == 1, String.format("Too many parameters are specified for \"%s\" system job.", JOB_NAME));
            Assert.isTrue(parameters.get(0).getName().equalsIgnoreCase(ConfigurationValue.CLEANUP_DESTROYED_BDATA_JOB_MAX_BDATA_INSTANCES.getKey()), String.format("Parameter \"%s\" is not supported by \"%s\" system job.", parameters.get(0).getName(), JOB_NAME));
            parameterHelper.getParameterValueAsInteger(parameters.get(0));
        }
    }

    @Override
    public JobDataMap getJobDataMap() {
        return getJobDataMap(ConfigurationValue.CLEANUP_DESTROYED_BDATA_JOB_MAX_BDATA_INSTANCES);
    }

    @Override
    public String getCronExpression() {
        return configurationHelper.getProperty(ConfigurationValue.CLEANUP_DESTROYED_BDATA_JOB_CRON_EXPRESSION);
    }
}

19 View Complete Implementation : ProcessAllAutomaticRoleByAttributeTaskExecutor.java
Copyright MIT License
Author : bcvsolutions
/**
 * Recalculate all automatic roles by attribute for all contracts.
 * Automatic roles are added by iterating over all {@link IdmAutomaticRoleAttributeDto}.
 * For each {@link IdmAutomaticRoleAttributeDto}, all newly passing {@link IdmIdentityContractDto} are found,
 * as well as {@link IdmIdentityContractDto} that hold the automatic role and must have it removed.
 * <br />
 * <br />
 * At most two {@link IdmRoleRequestDto} are created per contract. One request contains all newly assigned roles
 * and the second contains newly removed roles. For now, this is the only solution.
 * <br />
 * TODO: after some big refactor can be processed all concept in one request.
 *
 * @author Ondrej Kopr
 */
@Component(ProcessAllAutomaticRoleByAttributeTaskExecutor.TASK_NAME)
@DisallowConcurrentExecution
@Description("Recalculate all automatic roles by attribute. Creates new request with concepts.")
public class ProcessAllAutomaticRoleByAttributeTaskExecutor extends AbstractSchedulableTaskExecutor<Boolean> {

    public static final String TASK_NAME = "core-process-all-automatic-role-attribute-long-running-task";

    private static final int DEFAULT_PAGE_SIZE_ROLE = 10;

    private static final int DEFAULT_PAGE_SIZE_PAGE_SIZE_IDENTITIES = 100;

    @Autowired
    private IdmAutomaticRoleAttributeService automaticRoleAttributeService;

    @Autowired
    private IdmIdentityContractService identityContractService;

    @Autowired
    private IdmIdentityRoleService identityRoleService;

    @Autowired
    private IdmRoleRequestService roleRequestService;

    @Override
    public String getName() {
        return TASK_NAME;
    }

    @Override
    public Boolean process() {
        // found all IdmAutomaticRoleAttributeDto for process
        Page<IdmAutomaticRoleAttributeDto> toProcessOthers = automaticRoleAttributeService.findAllToProcess(null, PageRequest.of(0, DEFAULT_PAGE_SIZE_ROLE, new Sort(Direction.ASC, AbstractEntity_.id.getName())));
        boolean canContinue = true;
        // 
        this.counter = 0L;
        this.count = Long.valueOf(toProcessOthers.getTotalElements());
        // 
        // others
        while (toProcessOthers.hasContent()) {
            for (IdmAutomaticRoleAttributeDto automaticAttribute : toProcessOthers) {
                // start recalculation
                processAutomaticRoleForContract(automaticAttribute);
                // 
                counter++;
                canContinue = updateState();
                if (!canContinue) {
                    break;
                }
            }
            if (!toProcessOthers.hasNext()) {
                break;
            }
            toProcessOthers = automaticRoleAttributeService.findAllToProcess(null, toProcessOthers.nextPageable());
        }
        // 
        return Boolean.TRUE;
    }

    /**
     * Start recalculation for an automatic role. All identity roles (newly added and removed) are applied via {@link IdmRoleRequestDto}.
     * A role request is created up to twice per contract: one for newly added roles and one for newly removed ones. For now, this is the only solution.
     *
     * @param automaticRolAttributeDto
     */
    private void processAutomaticRoleForContract(IdmAutomaticRoleAttributeDto automaticRolAttributeDto) {
        UUID automaticRoleId = automaticRolAttributeDto.getId();
        // Every query fetches the first page of 100 rows
        PageRequest defaultPageRequest = PageRequest.of(0, DEFAULT_PAGE_SIZE_PAGE_SIZE_IDENTITIES);
        // 
        // process contracts
        Page<UUID> newPassedContracts = automaticRoleAttributeService.getContractsForAutomaticRole(automaticRoleId, true, defaultPageRequest);
        Page<UUID> newNotPassedContracts = automaticRoleAttributeService.getContractsForAutomaticRole(automaticRoleId, false, defaultPageRequest);
        // 
        boolean canContinue = true;
        while (canContinue) {
            for (UUID contractId : newPassedContracts) {
                // Concepts that will be added
                List<IdmConceptRoleRequestDto> concepts = new ArrayList<IdmConceptRoleRequestDto>();
                // 
                IdmIdentityContractDto contract = identityContractService.get(contractId);
                // 
                IdmConceptRoleRequestDto concept = new IdmConceptRoleRequestDto();
                concept.setIdentityContract(contract.getId());
                concept.setValidFrom(contract.getValidFrom());
                concept.setValidTill(contract.getValidTill());
                concept.setRole(automaticRolAttributeDto.getRole());
                concept.setAutomaticRole(automaticRoleId);
                concept.setOperation(ConceptRoleRequestOperation.ADD);
                concepts.add(concept);
                roleRequestService.executeConceptsImmediate(contract.getIdentity(), concepts);
                canContinue = updateState();
                if (!canContinue) {
                    break;
                }
            }
            if (newPassedContracts.hasNext()) {
                newPassedContracts = automaticRoleAttributeService.getContractsForAutomaticRole(automaticRoleId, true, defaultPageRequest);
            } else {
                break;
            }
        }
        // 
        while (canContinue) {
            for (UUID contractId : newNotPassedContracts) {
                // Identity id is taken from the embedded identity role, as a small speedup.
                UUID identityId = null;
                // 
                IdmIdentityRoleFilter filter = new IdmIdentityRoleFilter();
                filter.setIdentityContractId(contractId);
                filter.setAutomaticRoleId(automaticRoleId);
                List<IdmIdentityRoleDto> identityRoles = identityRoleService.find(filter, null).getContent();
                // Concepts that will be added
                List<IdmConceptRoleRequestDto> concepts = new ArrayList<>(identityRoles.size());
                for (IdmIdentityRoleDto identityRole : identityRoles) {
                    IdmConceptRoleRequestDto concept = new IdmConceptRoleRequestDto();
                    concept.setIdentityContract(contractId);
                    concept.setRole(automaticRolAttributeDto.getRole());
                    concept.setAutomaticRole(automaticRoleId);
                    concept.setIdentityRole(identityRole.getId());
                    concept.setOperation(ConceptRoleRequestOperation.REMOVE);
                    concepts.add(concept);
                    if (identityId == null) {
                        IdmIdentityContractDto contractDto = DtoUtils.getEmbedded(identityRole, IdmIdentityRole_.identityContract, IdmIdentityContractDto.class, null);
                        identityId = contractDto.getIdentity();
                    }
                }
                roleRequestService.executeConceptsImmediate(identityId, concepts);
                canContinue = updateState();
                if (!canContinue) {
                    break;
                }
            }
            if (newNotPassedContracts.hasNext()) {
                newNotPassedContracts = automaticRoleAttributeService.getContractsForAutomaticRole(automaticRoleId, false, defaultPageRequest);
            } else {
                break;
            }
        }
    }
}

19 View Complete Implementation : RemoveOldLogsTaskExecutor.java
Copyright MIT License
Author : bcvsolutions
/**
 * Long running task for remove old record from event logging tables.
 * Remove {@link IdmLoggingEventDto}, {@link IdmLoggingEventExceptionDto} and {@link IdmLoggingEventPropertyDto}.
 *
 * TODO: rename to DeleteLogTaskExecutor
 *
 * @author Ondrej Kopr <[email protected]>
 * @author Radek Tomiška
 */
@Service(RemoveOldLogsTaskExecutor.TASK_NAME)
@DisallowConcurrentExecution
@Description("Delete logs from event logging tables (events, eventException and eventProperty).")
public class RemoveOldLogsTaskExecutor extends AbstractSchedulableTaskExecutor<Boolean> {

    private static final Logger LOG = LoggerFactory.getLogger(RemoveOldLogsTaskExecutor.class);

    // 
    public static final String TASK_NAME = "core-remove-old-logs-long-running-task";

    public static final String PARAMETER_NUMBER_OF_DAYS = "removeRecordOlderThan";

    public static final int DEFAULT_NUMBER_OF_DAYS = 90;

    @Autowired
    private IdmLoggingEventService loggingEventService;

    // 
    private int numberOfDays;

    @Override
    public String getName() {
        return TASK_NAME;
    }

    @Override
    public void init(Map<String, Object> properties) {
        super.init(properties);
        // 
        Long givenNumberOfDays = getParameterConverter().toLong(properties, PARAMETER_NUMBER_OF_DAYS);
        if (givenNumberOfDays != null) {
            numberOfDays = Math.toIntExact(givenNumberOfDays);
        } else {
            numberOfDays = 0;
        }
    }

    @Override
    protected boolean start() {
        LOG.warn("Start deleting logs older than [{}] days.", numberOfDays);
        // 
        return super.start();
    }

    @Override
    protected Boolean end(Boolean result, Exception ex) {
        result = super.end(result, ex);
        LOG.warn("End deleting logs older than [{}]. Processed logs [{}].", numberOfDays, counter);
        return result;
    }

    @Override
    public Boolean process() {
        ZonedDateTime dateTimeTill = ZonedDateTime.now().minusDays(numberOfDays);
        // 
        IdmLoggingEventFilter filter = new IdmLoggingEventFilter();
        filter.setTill(dateTimeTill);
        // only for get total elements
        Page<IdmLoggingEventDto> loggingEvents = loggingEventService.find(filter, PageRequest.of(0, 1, Sort.by(IdmLoggingEvent_.timestmp.getName())));
        // 
        this.count = loggingEvents.getTotalElements();
        this.setCounter(0L);
        this.updateState();
        // 
        int deletedItems = loggingEventService.deleteLowerOrEqualTimestamp(dateTimeTill.toInstant().toEpochMilli());
        this.setCounter(Long.valueOf(deletedItems));
        // 
        LOG.info("Removed logs older than [{}] days was successfully completed. Removed logs [{}].", numberOfDays, this.counter);
        // 
        return Boolean.TRUE;
    }

    @Override
    public List<String> getPropertyNames() {
        List<String> parameters = super.getPropertyNames();
        parameters.add(PARAMETER_NUMBER_OF_DAYS);
        return parameters;
    }

    @Override
    public Map<String, Object> getProperties() {
        Map<String, Object> properties = super.getProperties();
        properties.put(PARAMETER_NUMBER_OF_DAYS, numberOfDays);
        // 
        return properties;
    }

    @Override
    public List<IdmFormAttributeDto> getFormAttributes() {
        IdmFormAttributeDto numberOfDaysAttribute = new IdmFormAttributeDto(PARAMETER_NUMBER_OF_DAYS, PARAMETER_NUMBER_OF_DAYS, PersistentType.LONG);
        numberOfDaysAttribute.setDefaultValue(String.valueOf(DEFAULT_NUMBER_OF_DAYS));
        // 
        return Lists.newArrayList(numberOfDaysAttribute);
    }
}
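
One detail worth noting in process() above: the numberOfDays cutoff is computed as a ZonedDateTime and then handed to the bulk delete as epoch milliseconds. A minimal sketch of just that conversion (the variable names mirror the task; the println is only for the demo):

import java.time.ZonedDateTime;

public class EpochCutoffSketch {

    public static void main(String[] args) {
        int numberOfDays = 90;
        ZonedDateTime dateTimeTill = ZonedDateTime.now().minusDays(numberOfDays);
        // same conversion the task passes to deleteLowerOrEqualTimestamp(...)
        long cutoffMillis = dateTimeTill.toInstant().toEpochMilli();
        System.out.println("deleting log rows with timestmp <= " + cutoffMillis);
    }
}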

19 View Complete Implementation : HibernateDataSourceHarvesterJob.java
Copyright GNU General Public License v2.0
Author : 52North
@PersistJobDataAfterExecution
@DisallowConcurrentExecution
public class HibernateDataSourceHarvesterJob extends ScheduledJob implements Job {

    @Override
    public void execute(JobExecutionContext context) throws JobExecutionException {
    // TODO Auto-generated method stub
    }

    @Override
    public JobDetail createJobDetails() {
        // TODO Auto-generated method stub
        return null;
    }
    // private final static Logger LOGGER =
    // LoggerFactory.getLogger(HibernateDataSourceHarvesterJob.class);
    // 
    // @Inject
    // private InsertRepository insertRepository;
    // private HibernateSessionHolder sessionHolder;
    // private EventBus eventBus;
    // private DaoFactory daoFactory;
    // 
    // @Inject
    // public void setConnectionProvider(ConnectionProvider connectionProvider)
    // {
    // this.sessionHolder = new HibernateSessionHolder(connectionProvider);
    // }
    // 
    // @Inject
    // public void setDaoFactory(DaoFactory daoFactory) {
    // this.daoFactory = daoFactory;
    // }
    // 
    // public HibernateSessionHolder getConnectionProvider() {
    // return this.sessionHolder;
    // }
    // 
    // public InsertRepository getInsertRepository() {
    // return insertRepository;
    // }
    // 
    // public void setInsertRepository(InsertRepository insertRepository) {
    // this.insertRepository = insertRepository;
    // }
    // 
    // @Inject
    // public void setServiceEventBus(EventBus eventBus) {
    // this.eventBus = eventBus;
    // }
    // 
    // public EventBus getServiceEventBus() {
    // return eventBus;
    // }
    // 
    // @Override
    // public JobDetail createJobDetails() {
    // return JobBuilder.newJob(HibernateDataSourceHarvesterJob.class)
    // .withIdentity(getJobName())
    // .build();
    // }
    // 
    // @Override
    // public void execute(JobExecutionContext context) throws
    // JobExecutionException {
    // Session session = null;
    // try {
    // LOGGER.info(context.getJobDetail().getKey() + " execution starts.");
    // session = getConnectionProvider().getSession();
    // ProxyServiceEntity service =
    // insertRepository.insertService(EntityBuilder.createService("localDB",
    // "description of localDB", "localhost", "2.0.0"));
    // // insertRepository.cleanUp(service);
    // // insertRepository.prepareInserting(service);
    // harvestOfferings(service, session);
    // harvestSeries(service, session);
    // harvestRelatedFeatures(service, session);
    // LOGGER.info(context.getJobDetail().getKey() + " execution ends.");
    // getServiceEventBus().submit(new UpdateCache());
    // } catch (Exception ex) {
    // LOGGER.error("Error while harvesting cache!", ex);
    // } finally {
    // getConnectionProvider().returnSession(session);
    // }
    // }
    // 
    // private void harvestOfferings(ServiceEntity service, Session session)
    // throws OwsExceptionReport {
    // Map<String, OfferingTimeExtrema> offeringTimeExtremas =
    // daoFactory.getOfferingDAO().getOfferingTimeExtrema(null, session);
    // for (Offering offering :
    // daoFactory.getOfferingDAO().getOfferings(session)) {
    // OfferingEntity offferingEntity = EntityBuilder.createOffering(offering,
    // service, true, true);
    // if (offeringTimeExtremas.containsKey(offering.getIdentifier())) {
    // OfferingTimeExtrema offeringTimeExtrema =
    // offeringTimeExtremas.get(offering.getIdentifier());
    // offferingEntity.setPhenomenonTimeStart(offeringTimeExtrema.getMinPhenomenonTime().toDate());
    // offferingEntity.setPhenomenonTimeEnd(offeringTimeExtrema.getMaxPhenomenonTime().toDate());
    // offferingEntity.setResultTimeStart(offeringTimeExtrema.getMinResultTime().toDate());
    // offferingEntity.setResultTimeEnd(offeringTimeExtrema.getMaxResultTime().toDate());
    // }
    // ReferencedEnvelope spatialFilteringProfileEnvelope =
    // daoFactory.getObservationDAO().getSpatialFilteringProfileEnvelopeForOfferingId(offering.getIdentifier(),
    // session);
    // if (spatialFilteringProfileEnvelope != null &&
    // spatialFilteringProfileEnvelope.isSetEnvelope()) {
    // offferingEntity.setEnvelope(new
    // GeometryFactory().toGeometry(JTSConverter.convert(spatialFilteringProfileEnvelope.getEnvelope())));
    // }
    // insertRepository.insertOffering(offferingEntity);
    // }
    // }
    // 
    // private void harvestSeries(ServiceEntity service, Session session) throws
    // OwsExceptionReport {
    // AbstractSeriesDAO seriesDAO = daoFactory.getSeriesDAO();
    // for (Series series : seriesDAO.getSeries(session)) {
    // DatasetEntity<?> dataset = EntityBuilder.createDataset(series, service);
    // if (dataset != null) {
    // insertRepository.insertDataset(dataset);
    // }
    // }
    // }
    // 
    // private void harvestRelatedFeatures(ServiceEntity service, Session
    // session) {
    // if (HibernateHelper.isEntitySupported(TOffering.class)) {
    // Set<RelatedFeatureEntity> relatedFeatures = new HashSet<>();
    // for (Offering offering :
    // daoFactory.getOfferingDAO().getOfferings(session)) {
    // if (offering instanceof TOffering && ((TOffering)
    // offering).hasRelatedFeatures()) {
    // for (RelatedFeature relatedFeatureEntity : ((TOffering)
    // offering).getRelatedFeatures()) {
    // relatedFeatures.add(EntityBuilder.createRelatedFeature(relatedFeatureEntity,
    // service));
    // }
    // }
    // }
    // insertRepository.insertRelatedFeature(relatedFeatures);
    // }
    // }
}

19 View Complete Implementation : DynamicExpressionObserverJob.java
Copyright Apache License 2.0
Author : apache
/**
 * This job is only active, if configurable cron-expressions are used - e.g.: @Scheduled(cronExpression = "{myKey}").
 * It observes jobs with configurable cron-expressions and updates their job-triggers once a config-change was detected.
 * Per default this job gets executed once per minute. That can be changed via config-entry:
 * deltaspike.scheduler.dynamic-expression.observer-interval=[any valid cron-expression]
 */
@DisallowConcurrentExecution
@PersistJobDataAfterExecution
public class DynamicExpressionObserverJob implements Deactivatable, Job {

    static final String CONFIG_EXPRESSION_KEY = "ds_configExpression";

    static final String ACTIVE_CRON_EXPRESSION_KEY = "ds_activeCronExpression";

    static final String TRIGGER_ID_KEY = "ds_triggerKey";

    static final String OBSERVER_POSTFIX = "_observer";

    private static final Logger LOG = Logger.getLogger(DynamicExpressionObserverJob.class.getName());

    @Inject
    private Scheduler<Job> scheduler;

    @Override
    public void execute(JobExecutionContext context) throws JobExecutionException {
        JobDataMap jobDataMap = context.getMergedJobDataMap();
        String configExpression = jobDataMap.getString(CONFIG_EXPRESSION_KEY);
        String triggerId = jobDataMap.getString(TRIGGER_ID_KEY);
        String activeCronExpression = jobDataMap.getString(ACTIVE_CRON_EXPRESSION_KEY);
        String configKey = configExpression.substring(1, configExpression.length() - 1);
        String configuredValue = ConfigResolver.getPropertyAwarePropertyValue(configKey, activeCronExpression);
        if (!activeCronExpression.equals(configuredValue)) {
            // both #put calls are needed currently
            context.getJobDetail().getJobDataMap().put(ACTIVE_CRON_EXPRESSION_KEY, configuredValue);
            context.getTrigger().getJobDataMap().put(ACTIVE_CRON_EXPRESSION_KEY, configuredValue);
            BeanProvider.injectFields(this);
            JobKey observerJobKey = context.getJobDetail().getKey();
            String observedJobName = observerJobKey.getName().substring(0, observerJobKey.getName().length() - OBSERVER_POSTFIX.length());
            JobKey observedJobKey = new JobKey(observedJobName, observerJobKey.getGroup());
            Trigger trigger = TriggerBuilder.newTrigger().withIdentity(triggerId).forJob(observedJobName, observedJobKey.getGroup()).withSchedule(CronScheduleBuilder.cronSchedule(configuredValue)).build();
            // use rescheduleJob instead of delete + add
            // (unwrap is ok here, because this class will only get active in case of a quartz-scheduler)
            org.quartz.Scheduler quartzScheduler = scheduler.unwrap(org.quartz.Scheduler.class);
            try {
                quartzScheduler.rescheduleJob(trigger.getKey(), trigger);
            } catch (SchedulerException e) {
                LOG.warning("failed to updated cron-expression for " + observedJobKey);
            }
        }
    }
}
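
DynamicExpressionObserverJob pairs @PersistJobDataAfterExecution with @DisallowConcurrentExecution, the standard combination for jobs that mutate their own JobDataMap: persistence writes the updated map back after each run, and serialized execution prevents two runs from racing on it. A minimal sketch of that combination (CountingJob is hypothetical, not from any of the projects above):

import org.quartz.DisallowConcurrentExecution;
import org.quartz.Job;
import org.quartz.JobDataMap;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
import org.quartz.PersistJobDataAfterExecution;

// Counts its own executions across triggers: @PersistJobDataAfterExecution
// writes the mutated JobDataMap back after each run, and
// @DisallowConcurrentExecution keeps two runs from racing on the map.
@PersistJobDataAfterExecution
@DisallowConcurrentExecution
public class CountingJob implements Job {

    static final String COUNT_KEY = "executionCount";

    @Override
    public void execute(JobExecutionContext context) throws JobExecutionException {
        JobDataMap map = context.getJobDetail().getJobDataMap();
        int count = map.containsKey(COUNT_KEY) ? map.getInt(COUNT_KEY) : 0;
        map.put(COUNT_KEY, count + 1);
        System.out.println("run #" + (count + 1));
    }
}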

19 View Complete Implementation : EncodeJob.java
Copyright Apache License 2.0
Author : apache
@DisallowConcurrentExecution
public class EncodeJob implements Job {

    private static final Logger log = getLogger(EncodeJob.class);

    public static final String CAPTURE_KEY = "capture";

    private Robot robot;

    private ScreenDimensions dim;

    private Rectangle screen = null;

    private int[][] image = null;

    public EncodeJob() {
        try {
            robot = new Robot();
        } catch (AWTException e) {
            log.error("encode: Unexpected Error while creating robot", e);
        }
    }

    @Override
    public void execute(JobExecutionContext context) throws JobExecutionException {
        JobDataMap data = context.getJobDetail().getJobDataMap();
        CaptureScreen capture = (CaptureScreen) data.get(CAPTURE_KEY);
        if (screen == null) {
            dim = capture.getDim();
            screen = new Rectangle(dim.getSpinnerX(), dim.getSpinnerY(), dim.getSpinnerWidth(), dim.getSpinnerHeight());
        }
        long start = 0;
        if (log.isTraceEnabled()) {
            start = System.currentTimeMillis();
        }
        image = ScreenV1Encoder.getImage(dim, screen, robot);
        if (log.isTraceEnabled()) {
            log.trace(String.format("encode: Image was captured in %s ms, size %sk", System.currentTimeMillis() - start, 4 * image.length * image[0].length / 1024));
            start = System.currentTimeMillis();
        }
        try {
            VideoData vData = capture.getEncoder().encode(image);
            if (log.isTraceEnabled()) {
                long now = System.currentTimeMillis();
                log.trace(String.format("encode: Image was encoded in %s ms, timestamp is %s", now - start, now - capture.getStartTime()));
            }
            capture.getFrames().offer(vData);
            capture.getEncoder().createUnalteredFrame();
        } catch (Exception e) {
            log.error("Error while encoding: ", e);
        }
    }
}

19 View Complete Implementation : DeleteLongRunningTaskExecutor.java
Copyright MIT License
Author : bcvsolutions
/**
 * Delete long running tasks.
 *
 * @author Radek Tomiška
 * @since 9.7.12
 */
@Service(DeleteLongRunningTaskExecutor.TASK_NAME)
@DisallowConcurrentExecution
@Description("Delete long running tasks.")
public class DeleteLongRunningTaskExecutor extends AbstractSchedulableStatefulExecutor<IdmLongRunningTaskDto> {

    private static final org.slf4j.Logger LOG = org.slf4j.LoggerFactory.getLogger(DeleteLongRunningTaskExecutor.class);

    public static final String TASK_NAME = "core-delete-long-running-task";

    // events older than
    public static final String PARAMETER_NUMBER_OF_DAYS = "numberOfDays";

    // archive state
    public static final String PARAMETER_OPERATION_STATE = "operationState";

    // 
    public static final int DEFAULT_NUMBER_OF_DAYS = 90;

    public static final OperationState DEFAULT_OPERATION_STATE = OperationState.EXECUTED;

    // 
    @Autowired
    private IdmLongRunningTaskService service;

    // 
    // optional
    private int numberOfDays = 0;

    // optional
    private OperationState operationState;

    @Override
    public String getName() {
        return TASK_NAME;
    }

    @Override
    public void init(Map<String, Object> properties) {
        super.init(properties);
        // 
        Long givenNumberOfDays = getParameterConverter().toLong(properties, PARAMETER_NUMBER_OF_DAYS);
        if (givenNumberOfDays != null) {
            numberOfDays = Math.toIntExact(givenNumberOfDays);
        } else {
            numberOfDays = 0;
        }
        operationState = getParameterConverter().toEnum(properties, PARAMETER_OPERATION_STATE, OperationState.class);
    }

    @Override
    protected boolean start() {
        LOG.warn("Start deleting long running tasks older than [{}] days in state [{}].", numberOfDays, operationState);
        // 
        return super.start();
    }

    @Override
    protected Boolean end(Boolean result, Exception ex) {
        result = super.end(result, ex);
        LOG.warn("End deleting long running tasks older than [{}] days in state [{}]. Processed lrts [{}].", numberOfDays, operationState, counter);
        return result;
    }

    @Override
    public Page<IdmLongRunningTaskDto> getItemsToProcess(Pageable pageable) {
        IdmLongRunningTaskFilter filter = new IdmLongRunningTaskFilter();
        filter.setOperationState(operationState);
        filter.setRunning(Boolean.FALSE);
        if (numberOfDays > 0) {
            filter.setTill(LocalDate.now().atStartOfDay(ZoneId.systemDefault()).minusDays(numberOfDays));
        }
        // a fresh pageable is used => records are being deleted, so we always need the first page
        return service.find(filter, PageRequest.of(0, pageable.getPageSize()));
    }

    @Override
    public Optional<OperationResult> processItem(IdmLongRunningTaskDto dto) {
        service.delete(dto);
        // 
        return Optional.of(new OperationResult.Builder(OperationState.EXECUTED).build());
    }

    @Override
    public List<String> getPropertyNames() {
        List<String> parameters = super.getPropertyNames();
        parameters.add(PARAMETER_NUMBER_OF_DAYS);
        parameters.add(PARAMETER_OPERATION_STATE);
        // 
        return parameters;
    }

    @Override
    public Map<String, Object> getProperties() {
        Map<String, Object> properties = super.getProperties();
        properties.put(PARAMETER_NUMBER_OF_DAYS, numberOfDays);
        properties.put(PARAMETER_OPERATION_STATE, operationState);
        // 
        return properties;
    }

    @Override
    public List<IdmFormAttributeDto> getFormAttributes() {
        IdmFormAttributeDto numberOfDaysAttribute = new IdmFormAttributeDto(PARAMETER_NUMBER_OF_DAYS, PARAMETER_NUMBER_OF_DAYS, PersistentType.LONG);
        numberOfDaysAttribute.setDefaultValue(String.valueOf(DEFAULT_NUMBER_OF_DAYS));
        // 
        IdmFormAttributeDto operationStateAttribute = new IdmFormAttributeDto(PARAMETER_OPERATION_STATE, PARAMETER_OPERATION_STATE, PersistentType.ENUMERATION);
        operationStateAttribute.setDefaultValue(DEFAULT_OPERATION_STATE.name());
        operationStateAttribute.setFaceType(BaseFaceType.OPERATION_STATE_ENUM);
        // 
        return Lists.newArrayList(numberOfDaysAttribute, operationStateAttribute);
    }

    @Override
    public boolean supportsQueue() {
        return false;
    }

    @Override
    public boolean supportsDryRun() {
        // TODO: get context (or LRT) in getItemsToProcess ...
        return false;
    }

    @Override
    public boolean requireNewTransaction() {
        return true;
    }
}

19 View Complete Implementation : ByIDETLJob.java
Copyright Apache License 2.0
Author : boubei-com
/**
 * ETL job that extracts by ID; multiple instances can be configured, each with a different interval.
 */
@DisallowConcurrentExecution
public class ByIDETLJob extends AbstractETLJob {

    protected String etlType() {
        return "byID";
    }

    protected Object getMaxID(Task task) {
        // Prefer reading the max ID from the target table
        try {
            String preSQL = task.getPreRepeatSQL();
            if (!EasyUtils.isNullOrEmpty(preSQL)) {
                Object maxID = SQLExcutor.query(task.getTargetDS(), preSQL).get(0).get("maxid");
                return EasyUtils.obj2Long(maxID);
            }
        } catch (Exception e) {
        }
        // If not configured, fall back to the max ID recorded in the task logs
        String hql = "select max(maxID) from TaskLog where taskId = ? and exception='no'";
        List<?> list = commonService.getList(hql, task.getId());
        Object maxId = null;
        try {
            maxId = list.get(0);
        } catch (Exception e) {
        }
        return maxId;
    }

    public TaskLog excuteTask(Task task) {
        Long maxID = EasyUtils.obj2Long(getMaxID(task));
        // If the ID configured on the task is greater than the max ID in the logs, it was set manually on the task
        maxID = Math.max(maxID, EasyUtils.obj2Long(task.getStartID()));
        log.info(task.getName() + " is starting! 【" + maxID + "】");
        long start = System.currentTimeMillis();
        TaskLog tLog = new TaskLog(task);
        try {
            Long[] result = etlByID(task, maxID);
            tLog.setException("no");
            tLog.setDetail("total=" + result[0]);
            tLog.setMaxID(result[1]);
        } catch (Exception e) {
            setException(tLog, task, e);
        } finally {
            // Record the task log, whether or not the run succeeded
            tLog.setRunningMS(System.currentTimeMillis() - start);
            commonService.create(tLog);
        }
        log.info(task.getName() + "Done! Cost time: " + (System.currentTimeMillis() - start));
        return tLog;
    }

    protected Long[] etlByID(Task task, Long startID) {
        Report report;
        String source = task.getSourceScript();
        try {
            Long reportId = EasyUtils.obj2Long(source);
            report = reportService.getReport(reportId, false);
        } catch (Exception e) {
            report = new Report();
            report.setName(task.getName());
            report.setDatasource(task.getSourceDS());
            report.setScript(task.getSourceScript());
            report.setParam("[{'label':'maxID', 'type':'number'}]");
        }
        // Always order by ID
        report.setScript("select * from (" + report.getScript() + ") t order by id asc");
        Map<String, String> paramsMap = new HashMap<String, String>();
        paramsMap.put("param1", String.valueOf(startID));
        SQLExcutor ex = ReportQuery.excute(report, paramsMap, 1, 1);
        int total = ex.count;
        int totalPages = PageInfo.calTotalPages(total, PAGE_SIZE);
        // Query page by page, insert in batches
        Long maxID = startID;
        String outputScript = task.getTargetScript();
        for (int pageNum = 1; pageNum <= totalPages; pageNum++) {
            // Before each iteration, check whether the task has been manually stopped
            checkTask(task.getId());
            long start = System.currentTimeMillis();
            ex = ReportQuery.excute(report, paramsMap, pageNum, PAGE_SIZE);
            List<Map<Integer, Object>> list = new ArrayList<Map<Integer, Object>>();
            for (Map<String, Object> row : ex.result) {
                Map<Integer, Object> item = new HashMap<Integer, Object>();
                for (String field : ex.selectFields) {
                    Object value = row.get(field);
                    item.put(item.size() + 1, value);
                }
                list.add(item);
                maxID = Math.max(maxID, EasyUtils.obj2Long(row.get("id")));
            }
            SQLExcutor.excuteBatch(outputScript, list, task.getTargetDS());
            // When inserting across multiple pages, log each page as it is inserted (except the last page, which is logged in excuteTask)
            if (pageNum < totalPages) {
                TaskLog tLog = new TaskLog(task);
                tLog.setException("no");
                tLog.setDetail("page-" + pageNum + "=" + list.size());
                tLog.setMaxID(maxID);
                tLog.setRunningMS(System.currentTimeMillis() - start);
                commonService.create(tLog);
            }
        }
        return new Long[] { (long) total, maxID };
    }
}
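
ByIDETLJob's core idea is incremental extraction keyed by the largest ID already transferred: remember the max ID, and each run pulls only rows above it, ordered by ID. A self-contained sketch of that idea over plain JDBC (the in-memory H2 URL and demo table are assumptions; the real job goes through SQLExcutor and ReportQuery instead):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.Statement;

public class ExtractByIdSketch {

    public static void main(String[] args) throws Exception {
        long maxId = 1L; // in the job above this comes from the target table or from TaskLog
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo")) {
            try (Statement setup = conn.createStatement()) {
                setup.execute("create table source_table(id bigint primary key, payload varchar)");
                setup.execute("insert into source_table values (1, 'a'), (2, 'b'), (3, 'c')");
            }
            // Pull only rows above the last transferred ID, always ordered by ID
            try (PreparedStatement ps = conn.prepareStatement(
                    "select id, payload from source_table where id > ? order by id asc")) {
                ps.setLong(1, maxId);
                try (ResultSet rs = ps.executeQuery()) {
                    while (rs.next()) {
                        // ... a real job would batch-insert the row into the target here ...
                        maxId = Math.max(maxId, rs.getLong("id"));
                    }
                }
            }
        }
        System.out.println("next run starts after id " + maxId);
    }
}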