target stringlengths 20 113k | src_fm stringlengths 11 86.3k | src_fm_fc stringlengths 21 86.4k | src_fm_fc_co stringlengths 30 86.4k | src_fm_fc_ms stringlengths 42 86.8k | src_fm_fc_ms_ff stringlengths 43 86.8k |
|---|---|---|---|---|---|
@Test public void testPut() { TestActorRef<MCacheDb> actorRef = TestActorRef.create(actorSystem, Props.create(MCacheDb.class)); actorRef.tell(new SetRequest("key", "value"), ActorRef.noSender()); MCacheDb mCacheDb = actorRef.underlyingActor(); Assert.assertEquals(mCacheDb.getValue("key"), "value"); } | public String getValue(String key) { return this.map.get(key); } | MCacheDb extends AbstractActor { public String getValue(String key) { return this.map.get(key); } } | MCacheDb extends AbstractActor { public String getValue(String key) { return this.map.get(key); } } | MCacheDb extends AbstractActor { public String getValue(String key) { return this.map.get(key); } @Override Receive createReceive(); String getValue(String key); } | MCacheDb extends AbstractActor { public String getValue(String key) { return this.map.get(key); } @Override Receive createReceive(); String getValue(String key); } |
@Test public void testSayHelloProxy() { Person person = new Person(); person.setFirstName("Jane"); person.setLastName("Doe"); Greeting greeting = helloWorldRequesterProxy.sayHello(person); assertEquals("Hello Jane Doe!", greeting.getText()); } | @Override public Greeting sayHello(Person person) { String firstName = person.getFirstName(); LOGGER.info("firstName={}", firstName); String lasttName = person.getLastName(); LOGGER.info("lastName={}", lasttName); ObjectFactory factory = new ObjectFactory(); Greeting response = factory.createGreeting(); String greeting... | HelloWorldImpl implements HelloWorldPortType { @Override public Greeting sayHello(Person person) { String firstName = person.getFirstName(); LOGGER.info("firstName={}", firstName); String lasttName = person.getLastName(); LOGGER.info("lastName={}", lasttName); ObjectFactory factory = new ObjectFactory(); Greeting respo... | HelloWorldImpl implements HelloWorldPortType { @Override public Greeting sayHello(Person person) { String firstName = person.getFirstName(); LOGGER.info("firstName={}", firstName); String lasttName = person.getLastName(); LOGGER.info("lastName={}", lasttName); ObjectFactory factory = new ObjectFactory(); Greeting respo... | HelloWorldImpl implements HelloWorldPortType { @Override public Greeting sayHello(Person person) { String firstName = person.getFirstName(); LOGGER.info("firstName={}", firstName); String lasttName = person.getLastName(); LOGGER.info("lastName={}", lasttName); ObjectFactory factory = new ObjectFactory(); Greeting respo... | HelloWorldImpl implements HelloWorldPortType { @Override public Greeting sayHello(Person person) { String firstName = person.getFirstName(); LOGGER.info("firstName={}", firstName); String lasttName = person.getLastName(); LOGGER.info("lastName={}", lasttName); ObjectFactory factory = new ObjectFactory(); Greeting respo... |
@Test public void testResolveWithoutAnyServiceMfaAttributes() throws Exception { final WebApplicationService was = getTargetService(); final Authentication auth = getAuthentication(true); final RegisteredService rswa = TestUtils.getRegisteredService("test1"); final DefaultRegisteredServiceMfaRoleProcessorImpl resolver ... | public List<MultiFactorAuthenticationRequestContext> resolve(@NotNull final Authentication authentication, @NotNull final WebApplicationService targetService) { String authenticationMethodAttributeName = null; final List<MultiFactorAuthenticationRequestContext> list = new ArrayList<>(); if (authentication != null && ta... | DefaultRegisteredServiceMfaRoleProcessorImpl implements RegisteredServiceMfaRoleProcessor { public List<MultiFactorAuthenticationRequestContext> resolve(@NotNull final Authentication authentication, @NotNull final WebApplicationService targetService) { String authenticationMethodAttributeName = null; final List<MultiFa... | DefaultRegisteredServiceMfaRoleProcessorImpl implements RegisteredServiceMfaRoleProcessor { public List<MultiFactorAuthenticationRequestContext> resolve(@NotNull final Authentication authentication, @NotNull final WebApplicationService targetService) { String authenticationMethodAttributeName = null; final List<MultiFa... | DefaultRegisteredServiceMfaRoleProcessorImpl implements RegisteredServiceMfaRoleProcessor { public List<MultiFactorAuthenticationRequestContext> resolve(@NotNull final Authentication authentication, @NotNull final WebApplicationService targetService) { String authenticationMethodAttributeName = null; final List<MultiFa... | DefaultRegisteredServiceMfaRoleProcessorImpl implements RegisteredServiceMfaRoleProcessor { public List<MultiFactorAuthenticationRequestContext> resolve(@NotNull final Authentication authentication, @NotNull final WebApplicationService targetService) { String authenticationMethodAttributeName = null; final List<MultiFa... |
@Test public void testResolveWithoutIncompleteServiceMfaAttributes() throws Exception { final WebApplicationService was = getTargetService(); final Authentication auth = getAuthentication(true); final RegisteredService rswa = TestUtils.getRegisteredService("test1"); DefaultRegisteredServiceProperty prop = new DefaultRe... | public List<MultiFactorAuthenticationRequestContext> resolve(@NotNull final Authentication authentication, @NotNull final WebApplicationService targetService) { String authenticationMethodAttributeName = null; final List<MultiFactorAuthenticationRequestContext> list = new ArrayList<>(); if (authentication != null && ta... | DefaultRegisteredServiceMfaRoleProcessorImpl implements RegisteredServiceMfaRoleProcessor { public List<MultiFactorAuthenticationRequestContext> resolve(@NotNull final Authentication authentication, @NotNull final WebApplicationService targetService) { String authenticationMethodAttributeName = null; final List<MultiFa... | DefaultRegisteredServiceMfaRoleProcessorImpl implements RegisteredServiceMfaRoleProcessor { public List<MultiFactorAuthenticationRequestContext> resolve(@NotNull final Authentication authentication, @NotNull final WebApplicationService targetService) { String authenticationMethodAttributeName = null; final List<MultiFa... | DefaultRegisteredServiceMfaRoleProcessorImpl implements RegisteredServiceMfaRoleProcessor { public List<MultiFactorAuthenticationRequestContext> resolve(@NotNull final Authentication authentication, @NotNull final WebApplicationService targetService) { String authenticationMethodAttributeName = null; final List<MultiFa... | DefaultRegisteredServiceMfaRoleProcessorImpl implements RegisteredServiceMfaRoleProcessor { public List<MultiFactorAuthenticationRequestContext> resolve(@NotNull final Authentication authentication, @NotNull final WebApplicationService targetService) { String authenticationMethodAttributeName = null; final List<MultiFa... |
@Test public void testResolveServiceWithMfaAttributesUserInRole() throws Exception { final WebApplicationService was = getTargetService(); final Authentication auth = getAuthentication(true); final RegisteredService rswa = TestUtils.getRegisteredService("test1"); DefaultRegisteredServiceProperty prop = new DefaultRegis... | public List<MultiFactorAuthenticationRequestContext> resolve(@NotNull final Authentication authentication, @NotNull final WebApplicationService targetService) { String authenticationMethodAttributeName = null; final List<MultiFactorAuthenticationRequestContext> list = new ArrayList<>(); if (authentication != null && ta... | DefaultRegisteredServiceMfaRoleProcessorImpl implements RegisteredServiceMfaRoleProcessor { public List<MultiFactorAuthenticationRequestContext> resolve(@NotNull final Authentication authentication, @NotNull final WebApplicationService targetService) { String authenticationMethodAttributeName = null; final List<MultiFa... | DefaultRegisteredServiceMfaRoleProcessorImpl implements RegisteredServiceMfaRoleProcessor { public List<MultiFactorAuthenticationRequestContext> resolve(@NotNull final Authentication authentication, @NotNull final WebApplicationService targetService) { String authenticationMethodAttributeName = null; final List<MultiFa... | DefaultRegisteredServiceMfaRoleProcessorImpl implements RegisteredServiceMfaRoleProcessor { public List<MultiFactorAuthenticationRequestContext> resolve(@NotNull final Authentication authentication, @NotNull final WebApplicationService targetService) { String authenticationMethodAttributeName = null; final List<MultiFa... | DefaultRegisteredServiceMfaRoleProcessorImpl implements RegisteredServiceMfaRoleProcessor { public List<MultiFactorAuthenticationRequestContext> resolve(@NotNull final Authentication authentication, @NotNull final WebApplicationService targetService) { String authenticationMethodAttributeName = null; final List<MultiFa... |
@Test public void testResolveServiceWithOnlyAuthnMethodAttribute() throws Exception { final WebApplicationService was = getTargetService(); final Authentication auth = getAuthentication(true); final RegisteredService rswa = TestUtils.getRegisteredService("test1"); final DefaultRegisteredServiceProperty prop = new Defau... | public List<MultiFactorAuthenticationRequestContext> resolve(@NotNull final Authentication authentication, @NotNull final WebApplicationService targetService) { String authenticationMethodAttributeName = null; final List<MultiFactorAuthenticationRequestContext> list = new ArrayList<>(); if (authentication != null && ta... | DefaultRegisteredServiceMfaRoleProcessorImpl implements RegisteredServiceMfaRoleProcessor { public List<MultiFactorAuthenticationRequestContext> resolve(@NotNull final Authentication authentication, @NotNull final WebApplicationService targetService) { String authenticationMethodAttributeName = null; final List<MultiFa... | DefaultRegisteredServiceMfaRoleProcessorImpl implements RegisteredServiceMfaRoleProcessor { public List<MultiFactorAuthenticationRequestContext> resolve(@NotNull final Authentication authentication, @NotNull final WebApplicationService targetService) { String authenticationMethodAttributeName = null; final List<MultiFa... | DefaultRegisteredServiceMfaRoleProcessorImpl implements RegisteredServiceMfaRoleProcessor { public List<MultiFactorAuthenticationRequestContext> resolve(@NotNull final Authentication authentication, @NotNull final WebApplicationService targetService) { String authenticationMethodAttributeName = null; final List<MultiFa... | DefaultRegisteredServiceMfaRoleProcessorImpl implements RegisteredServiceMfaRoleProcessor { public List<MultiFactorAuthenticationRequestContext> resolve(@NotNull final Authentication authentication, @NotNull final WebApplicationService targetService) { String authenticationMethodAttributeName = null; final List<MultiFa... |
@Test public void shouldRemoveRevisionsOnly() throws Exception { final int NUMBER_OF_REVISIONS = 3; final int NUMBER_OF_RESPONSES = 2; RevisionInformation revisionInformation = new RevisionInformation("DATASET", DATA_PROVIDER, SOURCE + REPRESENTATION_NAME, REVISION_NAME, REVISION_PROVIDER, getUTCDateString(date)); revi... | void setRevisionInformation(RevisionInformation revisionInformation) { this.revisionInformation = revisionInformation; } | RevisionRemoverJob implements Runnable { void setRevisionInformation(RevisionInformation revisionInformation) { this.revisionInformation = revisionInformation; } } | RevisionRemoverJob implements Runnable { void setRevisionInformation(RevisionInformation revisionInformation) { this.revisionInformation = revisionInformation; } RevisionRemoverJob(DataSetServiceClient dataSetServiceClient, RecordServiceClient recordServiceClient, RevisionInformation revisionInformation, RevisionServic... | RevisionRemoverJob implements Runnable { void setRevisionInformation(RevisionInformation revisionInformation) { this.revisionInformation = revisionInformation; } RevisionRemoverJob(DataSetServiceClient dataSetServiceClient, RecordServiceClient recordServiceClient, RevisionInformation revisionInformation, RevisionServic... | RevisionRemoverJob implements Runnable { void setRevisionInformation(RevisionInformation revisionInformation) { this.revisionInformation = revisionInformation; } RevisionRemoverJob(DataSetServiceClient dataSetServiceClient, RecordServiceClient recordServiceClient, RevisionInformation revisionInformation, RevisionServic... |
@Test public void shouldEmitSameTupleWhenNoResourcesHasToBeChecked() { Tuple anchorTuple = mock(TupleImpl.class); StormTaskTuple tuple = prepareTupleWithLinksCountEqualsToZero(); linkCheckBolt.execute(anchorTuple, tuple); verify(outputCollector, times(1)).emit( eq("NotificationStream"), eq(anchorTuple), captor.capture(... | @Override public void execute(Tuple anchorTuple, StormTaskTuple tuple) { ResourceInfo resourceInfo = readResourceInfoFromTuple(tuple); if (!hasLinksForCheck(resourceInfo)) { emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "The EDM file has no resources", "", StormTaskTupleHelper.getRecor... | LinkCheckBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple tuple) { ResourceInfo resourceInfo = readResourceInfoFromTuple(tuple); if (!hasLinksForCheck(resourceInfo)) { emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "The EDM file has no resou... | LinkCheckBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple tuple) { ResourceInfo resourceInfo = readResourceInfoFromTuple(tuple); if (!hasLinksForCheck(resourceInfo)) { emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "The EDM file has no resou... | LinkCheckBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple tuple) { ResourceInfo resourceInfo = readResourceInfoFromTuple(tuple); if (!hasLinksForCheck(resourceInfo)) { emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "The EDM file has no resou... | LinkCheckBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple tuple) { ResourceInfo resourceInfo = readResourceInfoFromTuple(tuple); if (!hasLinksForCheck(resourceInfo)) { emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "The EDM file has no resou... |
@Test public void shouldCheckOneLinkWithoutEmittingTuple() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); StormTaskTuple tuple = prepareRandomTuple(); linkCheckBolt.execute(anchorTuple, tuple); verify(outputCollector, times(0)).emit(eq("NotificationStream"), any(Tuple.class), Mockito.anyList()); verify(l... | @Override public void execute(Tuple anchorTuple, StormTaskTuple tuple) { ResourceInfo resourceInfo = readResourceInfoFromTuple(tuple); if (!hasLinksForCheck(resourceInfo)) { emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "The EDM file has no resources", "", StormTaskTupleHelper.getRecor... | LinkCheckBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple tuple) { ResourceInfo resourceInfo = readResourceInfoFromTuple(tuple); if (!hasLinksForCheck(resourceInfo)) { emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "The EDM file has no resou... | LinkCheckBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple tuple) { ResourceInfo resourceInfo = readResourceInfoFromTuple(tuple); if (!hasLinksForCheck(resourceInfo)) { emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "The EDM file has no resou... | LinkCheckBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple tuple) { ResourceInfo resourceInfo = readResourceInfoFromTuple(tuple); if (!hasLinksForCheck(resourceInfo)) { emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "The EDM file has no resou... | LinkCheckBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple tuple) { ResourceInfo resourceInfo = readResourceInfoFromTuple(tuple); if (!hasLinksForCheck(resourceInfo)) { emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "The EDM file has no resou... |
@Test public void shouldEmitTupleAfterCheckingAllResourcesFromFile() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); StormTaskTuple tuple = prepareRandomTuple(); linkCheckBolt.execute(anchorTuple, tuple); verify(outputCollector, times(0)).emit(eq("NotificationStream"), any(Tuple.class), Mockito.anyList())... | @Override public void execute(Tuple anchorTuple, StormTaskTuple tuple) { ResourceInfo resourceInfo = readResourceInfoFromTuple(tuple); if (!hasLinksForCheck(resourceInfo)) { emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "The EDM file has no resources", "", StormTaskTupleHelper.getRecor... | LinkCheckBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple tuple) { ResourceInfo resourceInfo = readResourceInfoFromTuple(tuple); if (!hasLinksForCheck(resourceInfo)) { emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "The EDM file has no resou... | LinkCheckBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple tuple) { ResourceInfo resourceInfo = readResourceInfoFromTuple(tuple); if (!hasLinksForCheck(resourceInfo)) { emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "The EDM file has no resou... | LinkCheckBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple tuple) { ResourceInfo resourceInfo = readResourceInfoFromTuple(tuple); if (!hasLinksForCheck(resourceInfo)) { emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "The EDM file has no resou... | LinkCheckBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple tuple) { ResourceInfo resourceInfo = readResourceInfoFromTuple(tuple); if (!hasLinksForCheck(resourceInfo)) { emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "The EDM file has no resou... |
@Test public void shouldEmitTupleWithErrorIncluded() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); doThrow(new LinkCheckingException(new Throwable())).when(linkChecker).performLinkChecking(Mockito.anyString()); StormTaskTuple tuple = prepareRandomTuple(); linkCheckBolt.execute(anchorTuple, tuple); linkC... | @Override public void execute(Tuple anchorTuple, StormTaskTuple tuple) { ResourceInfo resourceInfo = readResourceInfoFromTuple(tuple); if (!hasLinksForCheck(resourceInfo)) { emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "The EDM file has no resources", "", StormTaskTupleHelper.getRecor... | LinkCheckBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple tuple) { ResourceInfo resourceInfo = readResourceInfoFromTuple(tuple); if (!hasLinksForCheck(resourceInfo)) { emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "The EDM file has no resou... | LinkCheckBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple tuple) { ResourceInfo resourceInfo = readResourceInfoFromTuple(tuple); if (!hasLinksForCheck(resourceInfo)) { emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "The EDM file has no resou... | LinkCheckBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple tuple) { ResourceInfo resourceInfo = readResourceInfoFromTuple(tuple); if (!hasLinksForCheck(resourceInfo)) { emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "The EDM file has no resou... | LinkCheckBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple tuple) { ResourceInfo resourceInfo = readResourceInfoFromTuple(tuple); if (!hasLinksForCheck(resourceInfo)) { emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "The EDM file has no resou... |
@Test public void enrichEdmInternalSuccessfully() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); byte[] FILE_DATA = Files.readAllBytes(Paths.get("src/test/resources/Item_35834473_test.xml")); StormTaskTuple tuple = new StormTaskTuple(TASK_ID, TASK_NAME, SOURCE_VERSION_URL, FILE_DATA, new HashMap<String, ... | @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { String fileContent = new String(stormTaskTuple.getFileData()); LOGGER.info("starting enrichment on {} .....", stormTaskTuple.getFileUrl()); String output = enrichmentWorker.process(fileContent); LOGGER.info("Finishing enrichment on ... | EnrichmentBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { String fileContent = new String(stormTaskTuple.getFileData()); LOGGER.info("starting enrichment on {} .....", stormTaskTuple.getFileUrl()); String output = enrichmentWorker.process(fileConten... | EnrichmentBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { String fileContent = new String(stormTaskTuple.getFileData()); LOGGER.info("starting enrichment on {} .....", stormTaskTuple.getFileUrl()); String output = enrichmentWorker.process(fileConten... | EnrichmentBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { String fileContent = new String(stormTaskTuple.getFileData()); LOGGER.info("starting enrichment on {} .....", stormTaskTuple.getFileUrl()); String output = enrichmentWorker.process(fileConten... | EnrichmentBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { String fileContent = new String(stormTaskTuple.getFileData()); LOGGER.info("starting enrichment on {} .....", stormTaskTuple.getFileUrl()); String output = enrichmentWorker.process(fileConten... |
@Test public void sendErrorNotificationWhenTheEnrichmentFails() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); byte[] FILE_DATA = Files.readAllBytes(Paths.get("src/test/resources/example1.xml")); StormTaskTuple tuple = new StormTaskTuple(TASK_ID, TASK_NAME, SOURCE_VERSION_URL, FILE_DATA, prepareStormTask... | @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { String fileContent = new String(stormTaskTuple.getFileData()); LOGGER.info("starting enrichment on {} .....", stormTaskTuple.getFileUrl()); String output = enrichmentWorker.process(fileContent); LOGGER.info("Finishing enrichment on ... | EnrichmentBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { String fileContent = new String(stormTaskTuple.getFileData()); LOGGER.info("starting enrichment on {} .....", stormTaskTuple.getFileUrl()); String output = enrichmentWorker.process(fileConten... | EnrichmentBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { String fileContent = new String(stormTaskTuple.getFileData()); LOGGER.info("starting enrichment on {} .....", stormTaskTuple.getFileUrl()); String output = enrichmentWorker.process(fileConten... | EnrichmentBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { String fileContent = new String(stormTaskTuple.getFileData()); LOGGER.info("starting enrichment on {} .....", stormTaskTuple.getFileUrl()); String output = enrichmentWorker.process(fileConten... | EnrichmentBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { String fileContent = new String(stormTaskTuple.getFileData()); LOGGER.info("starting enrichment on {} .....", stormTaskTuple.getFileUrl()); String output = enrichmentWorker.process(fileConten... |
@Test public void harvestingForAllParametersSpecified() throws IOException, HarvesterException { Tuple anchorTuple = mock(TupleImpl.class); InputStream fileContentAsStream = getFileContentAsStream("/sampleEDMRecord.xml"); when(harvester.harvestRecord(anyString(), anyString(), anyString(), any(XPathExpression.class), an... | @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecord... | RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocatio... | RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocatio... | RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocatio... | RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocatio... |
@Test public void shouldHarvestRecordInEDMAndExtractIdentifiers() throws IOException, HarvesterException { Tuple anchorTuple = mock(TupleImpl.class); InputStream fileContentAsStream = getFileContentAsStream("/sampleEDMRecord.xml"); when(harvester.harvestRecord(anyString(), anyString(), anyString(), any(XPathExpression.... | @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecord... | RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocatio... | RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocatio... | RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocatio... | RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocatio... |
@Test public void shouldHarvestRecordInEDMAndNotUseHeaderIdentifierIfParameterIsDifferentThanTrue() throws IOException, HarvesterException { Tuple anchorTuple = mock(TupleImpl.class); InputStream fileContentAsStream = getFileContentAsStream("/sampleEDMRecord.xml"); when(harvester.harvestRecord(anyString(), anyString(),... | @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecord... | RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocatio... | RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocatio... | RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocatio... | RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocatio... |
@Test public void shouldHarvestRecordInEDMAndUseHeaderIdentifierIfSpecifiedInTaskParameters() throws IOException, HarvesterException { Tuple anchorTuple = mock(TupleImpl.class); InputStream fileContentAsStream = getFileContentAsStream("/sampleEDMRecord.xml"); when(harvester.harvestRecord(anyString(), anyString(), anySt... | @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecord... | RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocatio... | RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocatio... | RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocatio... | RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocatio... |
@Test public void shouldInvokeAllTheRemovalStepsExcludingErrorReports() { removerInvoker.executeInvokerForSingleTask(TASK_ID, false); verify(remover, times(1)).removeNotifications((eq(TASK_ID))); verify(remover, times(1)).removeStatistics((eq(TASK_ID))); verify(remover, times(0)).removeErrorReports((eq(TASK_ID))); } | public void executeInvokerForSingleTask(long taskId, boolean shouldRemoveErrors) { remover.removeNotifications(taskId); LOGGER.info("Logs for task Id:" + taskId + " were removed successfully"); LOGGER.info("Removing statistics for:" + taskId + " was started. This step could take times depending on the size of the task"... | RemoverInvoker { public void executeInvokerForSingleTask(long taskId, boolean shouldRemoveErrors) { remover.removeNotifications(taskId); LOGGER.info("Logs for task Id:" + taskId + " were removed successfully"); LOGGER.info("Removing statistics for:" + taskId + " was started. This step could take times depending on the ... | RemoverInvoker { public void executeInvokerForSingleTask(long taskId, boolean shouldRemoveErrors) { remover.removeNotifications(taskId); LOGGER.info("Logs for task Id:" + taskId + " were removed successfully"); LOGGER.info("Removing statistics for:" + taskId + " was started. This step could take times depending on the ... | RemoverInvoker { public void executeInvokerForSingleTask(long taskId, boolean shouldRemoveErrors) { remover.removeNotifications(taskId); LOGGER.info("Logs for task Id:" + taskId + " were removed successfully"); LOGGER.info("Removing statistics for:" + taskId + " was started. This step could take times depending on the ... | RemoverInvoker { public void executeInvokerForSingleTask(long taskId, boolean shouldRemoveErrors) { remover.removeNotifications(taskId); LOGGER.info("Logs for task Id:" + taskId + " were removed successfully"); LOGGER.info("Removing statistics for:" + taskId + " was started. This step could take times depending on the ... |
@Test public void shouldHarvestRecordInEDMAndUseHeaderIdentifierAndTrimItIfSpecifiedInTaskParameters() throws IOException, HarvesterException { InputStream fileContentAsStream = getFileContentAsStream("/sampleEDMRecord.xml"); Tuple anchorTuple = mock(TupleImpl.class); when(harvester.harvestRecord(anyString(), anyString... | @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecord... | RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocatio... | RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocatio... | RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocatio... | RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocatio... |
@Test public void shouldEmitErrorOnHarvestingExceptionWhenCannotExctractEuropeanaIdFromEDM() throws HarvesterException { Tuple anchorTuple = mock(TupleImpl.class); InputStream fileContentAsStream = getFileContentAsStream("/corruptedEDMRecord.xml"); when(harvester.harvestRecord(anyString(), anyString(), anyString(), any... | @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecord... | RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocatio... | RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocatio... | RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocatio... | RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocatio... |
@Test public void shouldEmitErrorOnHarvestingException() throws HarvesterException { Tuple anchorTuple = mock(TupleImpl.class); when(harvester.harvestRecord(anyString(), anyString(), anyString(), any(XPathExpression.class), any(XPathExpression.class))).thenThrow(new HarvesterException("Some!")); StormTaskTuple task = t... | @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecord... | RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocatio... | RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocatio... | RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocatio... | RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocatio... |
@Test public void harvestingForEmptyUrl() { Tuple anchorTuple = mock(TupleImpl.class); StormTaskTuple task = taskWithoutResourceUrl(); recordHarvestingBolt.execute(anchorTuple, task); verifyErrorEmit(); } | @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecord... | RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocatio... | RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocatio... | RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocatio... | RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocatio... |
@Test public void harvestingForEmptyRecordId() { Tuple anchorTuple = mock(TupleImpl.class); StormTaskTuple task = taskWithoutRecordId(); recordHarvestingBolt.execute(anchorTuple, task); verifyErrorEmit(); } | @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecord... | RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocatio... | RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocatio... | RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocatio... | RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocatio... |
@Test public void harvestForEmptyPrefix() { Tuple anchorTuple = mock(TupleImpl.class); StormTaskTuple task = taskWithoutPrefix(); recordHarvestingBolt.execute(anchorTuple, task); verifyErrorEmit(); } | @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecord... | RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocatio... | RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocatio... | RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocatio... | RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocatio... |
@Test public void shouldEmmitNotificationWhenDataSetListHasOneElement() throws MCSException, IOException { when(fileServiceClient.getFile(eq(FILE_URL),eq(AUTHORIZATION), eq(AUTHORIZATION_HEADER))).thenReturn(null); verifyMethodExecutionNumber(1, 0, FILE_URL); } | private InputStream getFile(FileServiceClient fileClient, String file, String authorization) throws MCSException, IOException { int retries = DEFAULT_RETRIES; while (true) { try { return fileClient.getFile(file, AUTHORIZATION, authorization); } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while getting... | ReadFileBolt extends AbstractDpsBolt { private InputStream getFile(FileServiceClient fileClient, String file, String authorization) throws MCSException, IOException { int retries = DEFAULT_RETRIES; while (true) { try { return fileClient.getFile(file, AUTHORIZATION, authorization); } catch (Exception e) { if (retries-- ... | ReadFileBolt extends AbstractDpsBolt { private InputStream getFile(FileServiceClient fileClient, String file, String authorization) throws MCSException, IOException { int retries = DEFAULT_RETRIES; while (true) { try { return fileClient.getFile(file, AUTHORIZATION, authorization); } catch (Exception e) { if (retries-- ... | ReadFileBolt extends AbstractDpsBolt { private InputStream getFile(FileServiceClient fileClient, String file, String authorization) throws MCSException, IOException { int retries = DEFAULT_RETRIES; while (true) { try { return fileClient.getFile(file, AUTHORIZATION, authorization); } catch (Exception e) { if (retries-- ... | ReadFileBolt extends AbstractDpsBolt { private InputStream getFile(FileServiceClient fileClient, String file, String authorization) throws MCSException, IOException { int retries = DEFAULT_RETRIES; while (true) { try { return fileClient.getFile(file, AUTHORIZATION, authorization); } catch (Exception e) { if (retries-- ... |
@Test public void shouldRetry3TimesBeforeFailingWhenThrowingMCSException() throws MCSException, IOException { doThrow(MCSException.class).when(fileServiceClient).getFile(eq(FILE_URL),eq(AUTHORIZATION), eq(AUTHORIZATION_HEADER)); verifyMethodExecutionNumber(4, 1, FILE_URL); } | private InputStream getFile(FileServiceClient fileClient, String file, String authorization) throws MCSException, IOException { int retries = DEFAULT_RETRIES; while (true) { try { return fileClient.getFile(file, AUTHORIZATION, authorization); } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while getting... | ReadFileBolt extends AbstractDpsBolt { private InputStream getFile(FileServiceClient fileClient, String file, String authorization) throws MCSException, IOException { int retries = DEFAULT_RETRIES; while (true) { try { return fileClient.getFile(file, AUTHORIZATION, authorization); } catch (Exception e) { if (retries-- ... | ReadFileBolt extends AbstractDpsBolt { private InputStream getFile(FileServiceClient fileClient, String file, String authorization) throws MCSException, IOException { int retries = DEFAULT_RETRIES; while (true) { try { return fileClient.getFile(file, AUTHORIZATION, authorization); } catch (Exception e) { if (retries-- ... | ReadFileBolt extends AbstractDpsBolt { private InputStream getFile(FileServiceClient fileClient, String file, String authorization) throws MCSException, IOException { int retries = DEFAULT_RETRIES; while (true) { try { return fileClient.getFile(file, AUTHORIZATION, authorization); } catch (Exception e) { if (retries-- ... | ReadFileBolt extends AbstractDpsBolt { private InputStream getFile(FileServiceClient fileClient, String file, String authorization) throws MCSException, IOException { int retries = DEFAULT_RETRIES; while (true) { try { return fileClient.getFile(file, AUTHORIZATION, authorization); } catch (Exception e) { if (retries-- ... |
@Test public void shouldRetry3TimesBeforeFailingWhenThrowingDriverException() throws MCSException, IOException { doThrow(DriverException.class).when(fileServiceClient).getFile(eq(FILE_URL),eq(AUTHORIZATION), eq(AUTHORIZATION_HEADER)); verifyMethodExecutionNumber(4, 1, FILE_URL); } | private InputStream getFile(FileServiceClient fileClient, String file, String authorization) throws MCSException, IOException { int retries = DEFAULT_RETRIES; while (true) { try { return fileClient.getFile(file, AUTHORIZATION, authorization); } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while getting... | ReadFileBolt extends AbstractDpsBolt { private InputStream getFile(FileServiceClient fileClient, String file, String authorization) throws MCSException, IOException { int retries = DEFAULT_RETRIES; while (true) { try { return fileClient.getFile(file, AUTHORIZATION, authorization); } catch (Exception e) { if (retries-- ... | ReadFileBolt extends AbstractDpsBolt { private InputStream getFile(FileServiceClient fileClient, String file, String authorization) throws MCSException, IOException { int retries = DEFAULT_RETRIES; while (true) { try { return fileClient.getFile(file, AUTHORIZATION, authorization); } catch (Exception e) { if (retries-- ... | ReadFileBolt extends AbstractDpsBolt { private InputStream getFile(FileServiceClient fileClient, String file, String authorization) throws MCSException, IOException { int retries = DEFAULT_RETRIES; while (true) { try { return fileClient.getFile(file, AUTHORIZATION, authorization); } catch (Exception e) { if (retries-- ... | ReadFileBolt extends AbstractDpsBolt { private InputStream getFile(FileServiceClient fileClient, String file, String authorization) throws MCSException, IOException { int retries = DEFAULT_RETRIES; while (true) { try { return fileClient.getFile(file, AUTHORIZATION, authorization); } catch (Exception e) { if (retries-- ... |
@Test public void successfulExecuteStormTupleWithExistedCloudId() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); CloudId cloudId = mock(CloudId.class); when(cloudId.getId()).thenReturn(SOURCE + CLOUD_ID); when(uisClient.getCloudId(SOURCE + DATA_PROVIDER, SOURCE + LOCAL_ID,AUTHORIZATION,AUTHORIZATION_HEAD... | private String getCloudId(String authorizationHeader, String providerId, String localId, String additionalLocalIdentifier) throws CloudException { String result; CloudId cloudId; cloudId = getCloudId(providerId, localId, authorizationHeader); if (cloudId != null) { result = cloudId.getId(); } else { result = createClou... | HarvestingWriteRecordBolt extends WriteRecordBolt { private String getCloudId(String authorizationHeader, String providerId, String localId, String additionalLocalIdentifier) throws CloudException { String result; CloudId cloudId; cloudId = getCloudId(providerId, localId, authorizationHeader); if (cloudId != null) { re... | HarvestingWriteRecordBolt extends WriteRecordBolt { private String getCloudId(String authorizationHeader, String providerId, String localId, String additionalLocalIdentifier) throws CloudException { String result; CloudId cloudId; cloudId = getCloudId(providerId, localId, authorizationHeader); if (cloudId != null) { re... | HarvestingWriteRecordBolt extends WriteRecordBolt { private String getCloudId(String authorizationHeader, String providerId, String localId, String additionalLocalIdentifier) throws CloudException { String result; CloudId cloudId; cloudId = getCloudId(providerId, localId, authorizationHeader); if (cloudId != null) { re... | HarvestingWriteRecordBolt extends WriteRecordBolt { private String getCloudId(String authorizationHeader, String providerId, String localId, String additionalLocalIdentifier) throws CloudException { String result; CloudId cloudId; cloudId = getCloudId(providerId, localId, authorizationHeader); if (cloudId != null) { re... |
@Test public void shouldExecuteTheRemovalOnListOfTASKS() throws IOException { removerInvoker.executeInvokerForListOfTasks("src/test/resources/taskIds.csv", true); verify(remover, times(6)).removeNotifications(anyLong()); verify(remover, times(6)).removeStatistics((anyLong())); verify(remover, times(6)).removeErrorRepor... | public void executeInvokerForListOfTasks(String filePath, boolean shouldRemoveErrors) throws IOException { TaskIdsReader reader = new CommaSeparatorReaderImpl(); List<String> taskIds = reader.getTaskIds(filePath); for (String taskId : taskIds) { executeInvokerForSingleTask(Long.valueOf(taskId), shouldRemoveErrors); } } | RemoverInvoker { public void executeInvokerForListOfTasks(String filePath, boolean shouldRemoveErrors) throws IOException { TaskIdsReader reader = new CommaSeparatorReaderImpl(); List<String> taskIds = reader.getTaskIds(filePath); for (String taskId : taskIds) { executeInvokerForSingleTask(Long.valueOf(taskId), shouldR... | RemoverInvoker { public void executeInvokerForListOfTasks(String filePath, boolean shouldRemoveErrors) throws IOException { TaskIdsReader reader = new CommaSeparatorReaderImpl(); List<String> taskIds = reader.getTaskIds(filePath); for (String taskId : taskIds) { executeInvokerForSingleTask(Long.valueOf(taskId), shouldR... | RemoverInvoker { public void executeInvokerForListOfTasks(String filePath, boolean shouldRemoveErrors) throws IOException { TaskIdsReader reader = new CommaSeparatorReaderImpl(); List<String> taskIds = reader.getTaskIds(filePath); for (String taskId : taskIds) { executeInvokerForSingleTask(Long.valueOf(taskId), shouldR... | RemoverInvoker { public void executeInvokerForListOfTasks(String filePath, boolean shouldRemoveErrors) throws IOException { TaskIdsReader reader = new CommaSeparatorReaderImpl(); List<String> taskIds = reader.getTaskIds(filePath); for (String taskId : taskIds) { executeInvokerForSingleTask(Long.valueOf(taskId), shouldR... |
@Test public void shouldRetry3TimesBeforeFailingWhenThrowingMCSException() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); CloudId cloudId = mock(CloudId.class); when(cloudId.getId()).thenReturn(SOURCE + CLOUD_ID); when(uisClient.getCloudId(SOURCE + DATA_PROVIDER, SOURCE + LOCAL_ID,AUTHORIZATION,AUTHORIZA... | private String getCloudId(String authorizationHeader, String providerId, String localId, String additionalLocalIdentifier) throws CloudException { String result; CloudId cloudId; cloudId = getCloudId(providerId, localId, authorizationHeader); if (cloudId != null) { result = cloudId.getId(); } else { result = createClou... | HarvestingWriteRecordBolt extends WriteRecordBolt { private String getCloudId(String authorizationHeader, String providerId, String localId, String additionalLocalIdentifier) throws CloudException { String result; CloudId cloudId; cloudId = getCloudId(providerId, localId, authorizationHeader); if (cloudId != null) { re... | HarvestingWriteRecordBolt extends WriteRecordBolt { private String getCloudId(String authorizationHeader, String providerId, String localId, String additionalLocalIdentifier) throws CloudException { String result; CloudId cloudId; cloudId = getCloudId(providerId, localId, authorizationHeader); if (cloudId != null) { re... | HarvestingWriteRecordBolt extends WriteRecordBolt { private String getCloudId(String authorizationHeader, String providerId, String localId, String additionalLocalIdentifier) throws CloudException { String result; CloudId cloudId; cloudId = getCloudId(providerId, localId, authorizationHeader); if (cloudId != null) { re... | HarvestingWriteRecordBolt extends WriteRecordBolt { private String getCloudId(String authorizationHeader, String providerId, String localId, String additionalLocalIdentifier) throws CloudException { String result; CloudId cloudId; cloudId = getCloudId(providerId, localId, authorizationHeader); if (cloudId != null) { re... |
@Test public void shouldRetry3TimesBeforeFailingWhenThrowingDriverException() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); CloudId cloudId = mock(CloudId.class); when(cloudId.getId()).thenReturn(SOURCE + CLOUD_ID); when(uisClient.getCloudId(SOURCE + DATA_PROVIDER, SOURCE + LOCAL_ID,AUTHORIZATION,AUTHOR... | private String getCloudId(String authorizationHeader, String providerId, String localId, String additionalLocalIdentifier) throws CloudException { String result; CloudId cloudId; cloudId = getCloudId(providerId, localId, authorizationHeader); if (cloudId != null) { result = cloudId.getId(); } else { result = createClou... | HarvestingWriteRecordBolt extends WriteRecordBolt { private String getCloudId(String authorizationHeader, String providerId, String localId, String additionalLocalIdentifier) throws CloudException { String result; CloudId cloudId; cloudId = getCloudId(providerId, localId, authorizationHeader); if (cloudId != null) { re... | HarvestingWriteRecordBolt extends WriteRecordBolt { private String getCloudId(String authorizationHeader, String providerId, String localId, String additionalLocalIdentifier) throws CloudException { String result; CloudId cloudId; cloudId = getCloudId(providerId, localId, authorizationHeader); if (cloudId != null) { re... | HarvestingWriteRecordBolt extends WriteRecordBolt { private String getCloudId(String authorizationHeader, String providerId, String localId, String additionalLocalIdentifier) throws CloudException { String result; CloudId cloudId; cloudId = getCloudId(providerId, localId, authorizationHeader); if (cloudId != null) { re... | HarvestingWriteRecordBolt extends WriteRecordBolt { private String getCloudId(String authorizationHeader, String providerId, String localId, String additionalLocalIdentifier) throws CloudException { String result; CloudId cloudId; cloudId = getCloudId(providerId, localId, authorizationHeader); if (cloudId != null) { re... |
@Test public void shouldParseFileAndEmitResources() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); try (InputStream stream = this.getClass().getResourceAsStream("/files/Item_35834473.xml")) { when(fileClient.getFile(eq(FILE_URL), eq(AUTHORIZATION), eq(AUTHORIZATION))).thenReturn(stream); when(taskStatusC... | @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { byte[] fileContent = IOUtils.toByteArray(stream); List<RdfResourceEntry> rdfResourceEntries = getResourcesFromRDF(fileContent); int linksCount = getLinksCount(stormTas... | ParseFileBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { byte[] fileContent = IOUtils.toByteArray(stream); List<RdfResourceEntry> rdfResourceEntries = getResourcesFromRDF(fileContent); in... | ParseFileBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { byte[] fileContent = IOUtils.toByteArray(stream); List<RdfResourceEntry> rdfResourceEntries = getResourcesFromRDF(fileContent); in... | ParseFileBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { byte[] fileContent = IOUtils.toByteArray(stream); List<RdfResourceEntry> rdfResourceEntries = getResourcesFromRDF(fileContent); in... | ParseFileBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { byte[] fileContent = IOUtils.toByteArray(stream); List<RdfResourceEntry> rdfResourceEntries = getResourcesFromRDF(fileContent); in... |
@Test public void shouldDropTaskAndStopEmitting() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); try (InputStream stream = this.getClass().getResourceAsStream("/files/Item_35834473.xml")) { when(fileClient.getFile(eq(FILE_URL), eq(AUTHORIZATION), eq(AUTHORIZATION))).thenReturn(stream); when(taskStatusChe... | @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { byte[] fileContent = IOUtils.toByteArray(stream); List<RdfResourceEntry> rdfResourceEntries = getResourcesFromRDF(fileContent); int linksCount = getLinksCount(stormTas... | ParseFileBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { byte[] fileContent = IOUtils.toByteArray(stream); List<RdfResourceEntry> rdfResourceEntries = getResourcesFromRDF(fileContent); in... | ParseFileBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { byte[] fileContent = IOUtils.toByteArray(stream); List<RdfResourceEntry> rdfResourceEntries = getResourcesFromRDF(fileContent); in... | ParseFileBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { byte[] fileContent = IOUtils.toByteArray(stream); List<RdfResourceEntry> rdfResourceEntries = getResourcesFromRDF(fileContent); in... | ParseFileBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { byte[] fileContent = IOUtils.toByteArray(stream); List<RdfResourceEntry> rdfResourceEntries = getResourcesFromRDF(fileContent); in... |
@Test public void shouldParseFileWithEmptyResourcesAndForwardOneTuple() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); try (InputStream stream = this.getClass().getResourceAsStream("/files/no-resources.xml")) { when(fileClient.getFile(eq(FILE_URL), eq(AUTHORIZATION), eq(AUTHORIZATION))).thenReturn(stream... | @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { byte[] fileContent = IOUtils.toByteArray(stream); List<RdfResourceEntry> rdfResourceEntries = getResourcesFromRDF(fileContent); int linksCount = getLinksCount(stormTas... | ParseFileBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { byte[] fileContent = IOUtils.toByteArray(stream); List<RdfResourceEntry> rdfResourceEntries = getResourcesFromRDF(fileContent); in... | ParseFileBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { byte[] fileContent = IOUtils.toByteArray(stream); List<RdfResourceEntry> rdfResourceEntries = getResourcesFromRDF(fileContent); in... | ParseFileBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { byte[] fileContent = IOUtils.toByteArray(stream); List<RdfResourceEntry> rdfResourceEntries = getResourcesFromRDF(fileContent); in... | ParseFileBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { byte[] fileContent = IOUtils.toByteArray(stream); List<RdfResourceEntry> rdfResourceEntries = getResourcesFromRDF(fileContent); in... |
@Test public void shouldEmitErrorWhenDownloadFileFails() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); doThrow(IOException.class).when(fileClient).getFile(eq(FILE_URL), eq(AUTHORIZATION), eq(AUTHORIZATION)); parseFileBolt.execute(anchorTuple, stormTaskTuple); verify(outputCollector, Mockito.times(1)).em... | @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { byte[] fileContent = IOUtils.toByteArray(stream); List<RdfResourceEntry> rdfResourceEntries = getResourcesFromRDF(fileContent); int linksCount = getLinksCount(stormTas... | ParseFileBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { byte[] fileContent = IOUtils.toByteArray(stream); List<RdfResourceEntry> rdfResourceEntries = getResourcesFromRDF(fileContent); in... | ParseFileBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { byte[] fileContent = IOUtils.toByteArray(stream); List<RdfResourceEntry> rdfResourceEntries = getResourcesFromRDF(fileContent); in... | ParseFileBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { byte[] fileContent = IOUtils.toByteArray(stream); List<RdfResourceEntry> rdfResourceEntries = getResourcesFromRDF(fileContent); in... | ParseFileBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { byte[] fileContent = IOUtils.toByteArray(stream); List<RdfResourceEntry> rdfResourceEntries = getResourcesFromRDF(fileContent); in... |
@Test public void shouldEmitErrorWhenGettingResourceLinksFails() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); try (InputStream stream = this.getClass().getResourceAsStream("/files/broken.xml")) { when(fileClient.getFile(eq(FILE_URL), eq(AUTHORIZATION), eq(AUTHORIZATION))).thenReturn(stream); parseFileB... | @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { byte[] fileContent = IOUtils.toByteArray(stream); List<RdfResourceEntry> rdfResourceEntries = getResourcesFromRDF(fileContent); int linksCount = getLinksCount(stormTas... | ParseFileBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { byte[] fileContent = IOUtils.toByteArray(stream); List<RdfResourceEntry> rdfResourceEntries = getResourcesFromRDF(fileContent); in... | ParseFileBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { byte[] fileContent = IOUtils.toByteArray(stream); List<RdfResourceEntry> rdfResourceEntries = getResourcesFromRDF(fileContent); in... | ParseFileBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { byte[] fileContent = IOUtils.toByteArray(stream); List<RdfResourceEntry> rdfResourceEntries = getResourcesFromRDF(fileContent); in... | ParseFileBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { byte[] fileContent = IOUtils.toByteArray(stream); List<RdfResourceEntry> rdfResourceEntries = getResourcesFromRDF(fileContent); in... |
@Test public void shouldRetry3TimesBeforeFailingWhenThrowingMCSException() throws MCSException { stormTaskTuple = prepareTupleWithSingleDataSet(); doThrow(MCSException.class).when(dataSetServiceClient).assignRepresentationToDataSet(anyString(), anyString(), anyString(), anyString(), anyString(),eq(AUTHORIZATION),eq(AUT... | private void assignRepresentationToDataSet(DataSet dataSet, Representation resultRepresentation, String authorizationHeader) throws MCSException { int retries = DEFAULT_RETRIES; while (true) { try { dataSetServiceClient.assignRepresentationToDataSet( dataSet.getProviderId(), dataSet.getId(), resultRepresentation.getClo... | AddResultToDataSetBolt extends AbstractDpsBolt { private void assignRepresentationToDataSet(DataSet dataSet, Representation resultRepresentation, String authorizationHeader) throws MCSException { int retries = DEFAULT_RETRIES; while (true) { try { dataSetServiceClient.assignRepresentationToDataSet( dataSet.getProviderI... | AddResultToDataSetBolt extends AbstractDpsBolt { private void assignRepresentationToDataSet(DataSet dataSet, Representation resultRepresentation, String authorizationHeader) throws MCSException { int retries = DEFAULT_RETRIES; while (true) { try { dataSetServiceClient.assignRepresentationToDataSet( dataSet.getProviderI... | AddResultToDataSetBolt extends AbstractDpsBolt { private void assignRepresentationToDataSet(DataSet dataSet, Representation resultRepresentation, String authorizationHeader) throws MCSException { int retries = DEFAULT_RETRIES; while (true) { try { dataSetServiceClient.assignRepresentationToDataSet( dataSet.getProviderI... | AddResultToDataSetBolt extends AbstractDpsBolt { private void assignRepresentationToDataSet(DataSet dataSet, Representation resultRepresentation, String authorizationHeader) throws MCSException { int retries = DEFAULT_RETRIES; while (true) { try { dataSetServiceClient.assignRepresentationToDataSet( dataSet.getProviderI... |
@Test public void shouldRetry3TimesBeforeFailingWhenThrowingDriverException() throws MCSException { stormTaskTuple = prepareTupleWithSingleDataSet(); doThrow(DriverException.class).when(dataSetServiceClient).assignRepresentationToDataSet(anyString(), anyString(), anyString(), anyString(), anyString(),eq(AUTHORIZATION),... | private void assignRepresentationToDataSet(DataSet dataSet, Representation resultRepresentation, String authorizationHeader) throws MCSException { int retries = DEFAULT_RETRIES; while (true) { try { dataSetServiceClient.assignRepresentationToDataSet( dataSet.getProviderId(), dataSet.getId(), resultRepresentation.getClo... | AddResultToDataSetBolt extends AbstractDpsBolt { private void assignRepresentationToDataSet(DataSet dataSet, Representation resultRepresentation, String authorizationHeader) throws MCSException { int retries = DEFAULT_RETRIES; while (true) { try { dataSetServiceClient.assignRepresentationToDataSet( dataSet.getProviderI... | AddResultToDataSetBolt extends AbstractDpsBolt { private void assignRepresentationToDataSet(DataSet dataSet, Representation resultRepresentation, String authorizationHeader) throws MCSException { int retries = DEFAULT_RETRIES; while (true) { try { dataSetServiceClient.assignRepresentationToDataSet( dataSet.getProviderI... | AddResultToDataSetBolt extends AbstractDpsBolt { private void assignRepresentationToDataSet(DataSet dataSet, Representation resultRepresentation, String authorizationHeader) throws MCSException { int retries = DEFAULT_RETRIES; while (true) { try { dataSetServiceClient.assignRepresentationToDataSet( dataSet.getProviderI... | AddResultToDataSetBolt extends AbstractDpsBolt { private void assignRepresentationToDataSet(DataSet dataSet, Representation resultRepresentation, String authorizationHeader) throws MCSException { int retries = DEFAULT_RETRIES; while (true) { try { dataSetServiceClient.assignRepresentationToDataSet( dataSet.getProviderI... |
@Test public void testAddingToQueueSuccessfully() throws Exception { when(taskStatusChecker.hasKillFlag(anyLong())).thenReturn(false); Representation representation = testHelper.prepareRepresentation(SOURCE + CLOUD_ID, SOURCE + REPRESENTATION_NAME, SOURCE + VERSION, SOURCE_VERSION_URL, DATA_PROVIDER, false, new Date())... | public int addTupleToQueue(StormTaskTuple stormTaskTuple, FileServiceClient fileServiceClient, Representation representation) { int count = 0; final long taskId = stormTaskTuple.getTaskId(); if (representation != null) { for (eu.europeana.cloud.common.model.File file : representation.getFiles()) { String fileUrl = ""; ... | QueueFiller { public int addTupleToQueue(StormTaskTuple stormTaskTuple, FileServiceClient fileServiceClient, Representation representation) { int count = 0; final long taskId = stormTaskTuple.getTaskId(); if (representation != null) { for (eu.europeana.cloud.common.model.File file : representation.getFiles()) { String ... | QueueFiller { public int addTupleToQueue(StormTaskTuple stormTaskTuple, FileServiceClient fileServiceClient, Representation representation) { int count = 0; final long taskId = stormTaskTuple.getTaskId(); if (representation != null) { for (eu.europeana.cloud.common.model.File file : representation.getFiles()) { String ... | QueueFiller { public int addTupleToQueue(StormTaskTuple stormTaskTuple, FileServiceClient fileServiceClient, Representation representation) { int count = 0; final long taskId = stormTaskTuple.getTaskId(); if (representation != null) { for (eu.europeana.cloud.common.model.File file : representation.getFiles()) { String ... | QueueFiller { public int addTupleToQueue(StormTaskTuple stormTaskTuple, FileServiceClient fileServiceClient, Representation representation) { int count = 0; final long taskId = stormTaskTuple.getTaskId(); if (representation != null) { for (eu.europeana.cloud.common.model.File file : representation.getFiles()) { String ... |
@Test public void shouldCreateTheCorrectFilePath() throws Exception { String filePath = FileUtil.createFilePath(FOLDER_PATH, FILE_NAME_WITHOUT_EXTENSION, EXTENSION); assertEquals(filePath, FILE_PATH); filePath = FileUtil.createFilePath(FOLDER_PATH, FILE_NAME_WITH_EXTENSION, EXTENSION); assertEquals(filePath, FILE_PATH)... | public static String createFilePath(String folderPath, String fileName, String extension) { String filePtah = folderPath + fileName; if ("".equals(FilenameUtils.getExtension(fileName))) filePtah = filePtah + extension; return filePtah; } | FileUtil { public static String createFilePath(String folderPath, String fileName, String extension) { String filePtah = folderPath + fileName; if ("".equals(FilenameUtils.getExtension(fileName))) filePtah = filePtah + extension; return filePtah; } } | FileUtil { public static String createFilePath(String folderPath, String fileName, String extension) { String filePtah = folderPath + fileName; if ("".equals(FilenameUtils.getExtension(fileName))) filePtah = filePtah + extension; return filePtah; } } | FileUtil { public static String createFilePath(String folderPath, String fileName, String extension) { String filePtah = folderPath + fileName; if ("".equals(FilenameUtils.getExtension(fileName))) filePtah = filePtah + extension; return filePtah; } static void persistStreamToFile(InputStream inputStream, String folder... | FileUtil { public static String createFilePath(String folderPath, String fileName, String extension) { String filePtah = folderPath + fileName; if ("".equals(FilenameUtils.getExtension(fileName))) filePtah = filePtah + extension; return filePtah; } static void persistStreamToFile(InputStream inputStream, String folder... |
@Test public void testKillingTheTaskEffectOnQueue() throws Exception { when(taskStatusChecker.hasKillFlag(anyLong())).thenReturn(false, false, false, true); Representation representation = testHelper.prepareRepresentation(SOURCE + CLOUD_ID, SOURCE + REPRESENTATION_NAME, SOURCE + VERSION, SOURCE_VERSION_URL, DATA_PROVID... | public int addTupleToQueue(StormTaskTuple stormTaskTuple, FileServiceClient fileServiceClient, Representation representation) { int count = 0; final long taskId = stormTaskTuple.getTaskId(); if (representation != null) { for (eu.europeana.cloud.common.model.File file : representation.getFiles()) { String fileUrl = ""; ... | QueueFiller { public int addTupleToQueue(StormTaskTuple stormTaskTuple, FileServiceClient fileServiceClient, Representation representation) { int count = 0; final long taskId = stormTaskTuple.getTaskId(); if (representation != null) { for (eu.europeana.cloud.common.model.File file : representation.getFiles()) { String ... | QueueFiller { public int addTupleToQueue(StormTaskTuple stormTaskTuple, FileServiceClient fileServiceClient, Representation representation) { int count = 0; final long taskId = stormTaskTuple.getTaskId(); if (representation != null) { for (eu.europeana.cloud.common.model.File file : representation.getFiles()) { String ... | QueueFiller { public int addTupleToQueue(StormTaskTuple stormTaskTuple, FileServiceClient fileServiceClient, Representation representation) { int count = 0; final long taskId = stormTaskTuple.getTaskId(); if (representation != null) { for (eu.europeana.cloud.common.model.File file : representation.getFiles()) { String ... | QueueFiller { public int addTupleToQueue(StormTaskTuple stormTaskTuple, FileServiceClient fileServiceClient, Representation representation) { int count = 0; final long taskId = stormTaskTuple.getTaskId(); if (representation != null) { for (eu.europeana.cloud.common.model.File file : representation.getFiles()) { String ... |
@Test public void shouldEmitErrorsInCaseOfExceptionWhileGettingTheFiles() throws Exception { when(taskStatusChecker.hasKillFlag(anyLong())).thenReturn(false); Representation representation = testHelper.prepareRepresentation(SOURCE + CLOUD_ID, SOURCE + REPRESENTATION_NAME, SOURCE + VERSION, SOURCE_VERSION_URL, DATA_PROV... | public int addTupleToQueue(StormTaskTuple stormTaskTuple, FileServiceClient fileServiceClient, Representation representation) { int count = 0; final long taskId = stormTaskTuple.getTaskId(); if (representation != null) { for (eu.europeana.cloud.common.model.File file : representation.getFiles()) { String fileUrl = ""; ... | QueueFiller { public int addTupleToQueue(StormTaskTuple stormTaskTuple, FileServiceClient fileServiceClient, Representation representation) { int count = 0; final long taskId = stormTaskTuple.getTaskId(); if (representation != null) { for (eu.europeana.cloud.common.model.File file : representation.getFiles()) { String ... | QueueFiller { public int addTupleToQueue(StormTaskTuple stormTaskTuple, FileServiceClient fileServiceClient, Representation representation) { int count = 0; final long taskId = stormTaskTuple.getTaskId(); if (representation != null) { for (eu.europeana.cloud.common.model.File file : representation.getFiles()) { String ... | QueueFiller { public int addTupleToQueue(StormTaskTuple stormTaskTuple, FileServiceClient fileServiceClient, Representation representation) { int count = 0; final long taskId = stormTaskTuple.getTaskId(); if (representation != null) { for (eu.europeana.cloud.common.model.File file : representation.getFiles()) { String ... | QueueFiller { public int addTupleToQueue(StormTaskTuple stormTaskTuple, FileServiceClient fileServiceClient, Representation representation) { int count = 0; final long taskId = stormTaskTuple.getTaskId(); if (representation != null) { for (eu.europeana.cloud.common.model.File file : representation.getFiles()) { String ... |
@Test public void shouldEmitTheFilesWhenNoRevisionIsSpecified() throws Exception { when(taskStatusChecker.hasKillFlag(anyLong())).thenReturn(false); when(collector.emit(anyListOf(Object.class))).thenReturn(null); List<String> dataSets = new ArrayList<>(); dataSets.add(DATASET_URL); DpsTask dpsTask = prepareDpsTask(data... | @Override public Void call() { try { execute(); } catch (Exception e) { taskStatusUpdater.setTaskDropped(dpsTask.getTaskId(), "The task was dropped because of " + e.getMessage() + ". The full exception is" + Throwables.getStackTraceAsString(e)); } return null; } | TaskExecutor implements Callable<Void> { @Override public Void call() { try { execute(); } catch (Exception e) { taskStatusUpdater.setTaskDropped(dpsTask.getTaskId(), "The task was dropped because of " + e.getMessage() + ". The full exception is" + Throwables.getStackTraceAsString(e)); } return null; } } | TaskExecutor implements Callable<Void> { @Override public Void call() { try { execute(); } catch (Exception e) { taskStatusUpdater.setTaskDropped(dpsTask.getTaskId(), "The task was dropped because of " + e.getMessage() + ". The full exception is" + Throwables.getStackTraceAsString(e)); } return null; } TaskExecutor(Spo... | TaskExecutor implements Callable<Void> { @Override public Void call() { try { execute(); } catch (Exception e) { taskStatusUpdater.setTaskDropped(dpsTask.getTaskId(), "The task was dropped because of " + e.getMessage() + ". The full exception is" + Throwables.getStackTraceAsString(e)); } return null; } TaskExecutor(Spo... | TaskExecutor implements Callable<Void> { @Override public Void call() { try { execute(); } catch (Exception e) { taskStatusUpdater.setTaskDropped(dpsTask.getTaskId(), "The task was dropped because of " + e.getMessage() + ". The full exception is" + Throwables.getStackTraceAsString(e)); } return null; } TaskExecutor(Spo... |
@Test public void shouldFailWhenReadFileThrowMCSExceptionWhenNoRevisionIsSpecified() throws Exception { when(taskStatusChecker.hasKillFlag(anyLong())).thenReturn(false); when(collector.emit(anyListOf(Object.class))).thenReturn(null); List<String> dataSets = new ArrayList<>(); dataSets.add(DATASET_URL); DpsTask dpsTask ... | @Override public Void call() { try { execute(); } catch (Exception e) { taskStatusUpdater.setTaskDropped(dpsTask.getTaskId(), "The task was dropped because of " + e.getMessage() + ". The full exception is" + Throwables.getStackTraceAsString(e)); } return null; } | TaskExecutor implements Callable<Void> { @Override public Void call() { try { execute(); } catch (Exception e) { taskStatusUpdater.setTaskDropped(dpsTask.getTaskId(), "The task was dropped because of " + e.getMessage() + ". The full exception is" + Throwables.getStackTraceAsString(e)); } return null; } } | TaskExecutor implements Callable<Void> { @Override public Void call() { try { execute(); } catch (Exception e) { taskStatusUpdater.setTaskDropped(dpsTask.getTaskId(), "The task was dropped because of " + e.getMessage() + ". The full exception is" + Throwables.getStackTraceAsString(e)); } return null; } TaskExecutor(Spo... | TaskExecutor implements Callable<Void> { @Override public Void call() { try { execute(); } catch (Exception e) { taskStatusUpdater.setTaskDropped(dpsTask.getTaskId(), "The task was dropped because of " + e.getMessage() + ". The full exception is" + Throwables.getStackTraceAsString(e)); } return null; } TaskExecutor(Spo... | TaskExecutor implements Callable<Void> { @Override public Void call() { try { execute(); } catch (Exception e) { taskStatusUpdater.setTaskDropped(dpsTask.getTaskId(), "The task was dropped because of " + e.getMessage() + ". The full exception is" + Throwables.getStackTraceAsString(e)); } return null; } TaskExecutor(Spo... |
@Test public void shouldFailPerEachFileWhenReadFileThrowDriverExceptionWhenNoRevisionIsSpecified() throws Exception { when(taskStatusChecker.hasKillFlag(anyLong())).thenReturn(false); when(collector.emit(anyListOf(Object.class))).thenReturn(null); List<String> dataSets = new ArrayList<>(); dataSets.add(DATASET_URL); Dp... | @Override public Void call() { try { execute(); } catch (Exception e) { taskStatusUpdater.setTaskDropped(dpsTask.getTaskId(), "The task was dropped because of " + e.getMessage() + ". The full exception is" + Throwables.getStackTraceAsString(e)); } return null; } | TaskExecutor implements Callable<Void> { @Override public Void call() { try { execute(); } catch (Exception e) { taskStatusUpdater.setTaskDropped(dpsTask.getTaskId(), "The task was dropped because of " + e.getMessage() + ". The full exception is" + Throwables.getStackTraceAsString(e)); } return null; } } | TaskExecutor implements Callable<Void> { @Override public Void call() { try { execute(); } catch (Exception e) { taskStatusUpdater.setTaskDropped(dpsTask.getTaskId(), "The task was dropped because of " + e.getMessage() + ". The full exception is" + Throwables.getStackTraceAsString(e)); } return null; } TaskExecutor(Spo... | TaskExecutor implements Callable<Void> { @Override public Void call() { try { execute(); } catch (Exception e) { taskStatusUpdater.setTaskDropped(dpsTask.getTaskId(), "The task was dropped because of " + e.getMessage() + ". The full exception is" + Throwables.getStackTraceAsString(e)); } return null; } TaskExecutor(Spo... | TaskExecutor implements Callable<Void> { @Override public Void call() { try { execute(); } catch (Exception e) { taskStatusUpdater.setTaskDropped(dpsTask.getTaskId(), "The task was dropped because of " + e.getMessage() + ". The full exception is" + Throwables.getStackTraceAsString(e)); } return null; } TaskExecutor(Spo... |
@Test public void shouldStopEmittingFilesWhenTaskIsKilled() throws Exception { when(taskStatusChecker.hasKillFlag(anyLong())).thenReturn(false, false, true); when(collector.emit(anyListOf(Object.class))).thenReturn(null); List<String> dataSets = new ArrayList<>(); dataSets.add(DATASET_URL); DpsTask dpsTask = prepareDps... | @Override public Void call() { try { execute(); } catch (Exception e) { taskStatusUpdater.setTaskDropped(dpsTask.getTaskId(), "The task was dropped because of " + e.getMessage() + ". The full exception is" + Throwables.getStackTraceAsString(e)); } return null; } | TaskExecutor implements Callable<Void> { @Override public Void call() { try { execute(); } catch (Exception e) { taskStatusUpdater.setTaskDropped(dpsTask.getTaskId(), "The task was dropped because of " + e.getMessage() + ". The full exception is" + Throwables.getStackTraceAsString(e)); } return null; } } | TaskExecutor implements Callable<Void> { @Override public Void call() { try { execute(); } catch (Exception e) { taskStatusUpdater.setTaskDropped(dpsTask.getTaskId(), "The task was dropped because of " + e.getMessage() + ". The full exception is" + Throwables.getStackTraceAsString(e)); } return null; } TaskExecutor(Spo... | TaskExecutor implements Callable<Void> { @Override public Void call() { try { execute(); } catch (Exception e) { taskStatusUpdater.setTaskDropped(dpsTask.getTaskId(), "The task was dropped because of " + e.getMessage() + ". The full exception is" + Throwables.getStackTraceAsString(e)); } return null; } TaskExecutor(Spo... | TaskExecutor implements Callable<Void> { @Override public Void call() { try { execute(); } catch (Exception e) { taskStatusUpdater.setTaskDropped(dpsTask.getTaskId(), "The task was dropped because of " + e.getMessage() + ". The full exception is" + Throwables.getStackTraceAsString(e)); } return null; } TaskExecutor(Spo... |
@Test public void shouldNotEmitAnyFilesWhenTaskIsKilledBeforeIteratingRepresentation() throws Exception { when(taskStatusChecker.hasKillFlag(anyLong())).thenReturn(true); when(collector.emit(anyListOf(Object.class))).thenReturn(null); List<String> dataSets = new ArrayList<>(); dataSets.add(DATASET_URL); DpsTask dpsTask... | @Override public Void call() { try { execute(); } catch (Exception e) { taskStatusUpdater.setTaskDropped(dpsTask.getTaskId(), "The task was dropped because of " + e.getMessage() + ". The full exception is" + Throwables.getStackTraceAsString(e)); } return null; } | TaskExecutor implements Callable<Void> { @Override public Void call() { try { execute(); } catch (Exception e) { taskStatusUpdater.setTaskDropped(dpsTask.getTaskId(), "The task was dropped because of " + e.getMessage() + ". The full exception is" + Throwables.getStackTraceAsString(e)); } return null; } } | TaskExecutor implements Callable<Void> { @Override public Void call() { try { execute(); } catch (Exception e) { taskStatusUpdater.setTaskDropped(dpsTask.getTaskId(), "The task was dropped because of " + e.getMessage() + ". The full exception is" + Throwables.getStackTraceAsString(e)); } return null; } TaskExecutor(Spo... | TaskExecutor implements Callable<Void> { @Override public Void call() { try { execute(); } catch (Exception e) { taskStatusUpdater.setTaskDropped(dpsTask.getTaskId(), "The task was dropped because of " + e.getMessage() + ". The full exception is" + Throwables.getStackTraceAsString(e)); } return null; } TaskExecutor(Spo... | TaskExecutor implements Callable<Void> { @Override public Void call() { try { execute(); } catch (Exception e) { taskStatusUpdater.setTaskDropped(dpsTask.getTaskId(), "The task was dropped because of " + e.getMessage() + ". The full exception is" + Throwables.getStackTraceAsString(e)); } return null; } TaskExecutor(Spo... |
@Test public void testLoadingDefaultPropertiesFile() throws FileNotFoundException, IOException { reader.loadDefaultPropertyFile(DEFAULT_PROPERTIES_FILE, topologyProperties); assertNotNull(topologyProperties); assertFalse(topologyProperties.isEmpty()); for (final Map.Entry<Object, Object> e : topologyProperties.entrySet... | public void loadDefaultPropertyFile(String defaultPropertyFile, Properties topologyProperties) throws IOException { InputStream propertiesInputStream = Thread.currentThread() .getContextClassLoader().getResourceAsStream(defaultPropertyFile); if (propertiesInputStream == null) throw new FileNotFoundException(); topology... | PropertyFileLoader { public void loadDefaultPropertyFile(String defaultPropertyFile, Properties topologyProperties) throws IOException { InputStream propertiesInputStream = Thread.currentThread() .getContextClassLoader().getResourceAsStream(defaultPropertyFile); if (propertiesInputStream == null) throw new FileNotFound... | PropertyFileLoader { public void loadDefaultPropertyFile(String defaultPropertyFile, Properties topologyProperties) throws IOException { InputStream propertiesInputStream = Thread.currentThread() .getContextClassLoader().getResourceAsStream(defaultPropertyFile); if (propertiesInputStream == null) throw new FileNotFound... | PropertyFileLoader { public void loadDefaultPropertyFile(String defaultPropertyFile, Properties topologyProperties) throws IOException { InputStream propertiesInputStream = Thread.currentThread() .getContextClassLoader().getResourceAsStream(defaultPropertyFile); if (propertiesInputStream == null) throw new FileNotFound... | PropertyFileLoader { public void loadDefaultPropertyFile(String defaultPropertyFile, Properties topologyProperties) throws IOException { InputStream propertiesInputStream = Thread.currentThread() .getContextClassLoader().getResourceAsStream(defaultPropertyFile); if (propertiesInputStream == null) throw new FileNotFound... |
@Test public void testLoadingProvidedPropertiesFile() throws FileNotFoundException, IOException { reader.loadProvidedPropertyFile(PROVIDED_PROPERTIES_FILE, topologyProperties); assertNotNull(topologyProperties); assertFalse(topologyProperties.isEmpty()); for (final Map.Entry<Object, Object> e : topologyProperties.entry... | public void loadProvidedPropertyFile(String fileName, Properties topologyProperties) throws IOException { File file = new File(fileName); FileInputStream fileInput = new FileInputStream(file); topologyProperties.load(fileInput); fileInput.close(); } | PropertyFileLoader { public void loadProvidedPropertyFile(String fileName, Properties topologyProperties) throws IOException { File file = new File(fileName); FileInputStream fileInput = new FileInputStream(file); topologyProperties.load(fileInput); fileInput.close(); } } | PropertyFileLoader { public void loadProvidedPropertyFile(String fileName, Properties topologyProperties) throws IOException { File file = new File(fileName); FileInputStream fileInput = new FileInputStream(file); topologyProperties.load(fileInput); fileInput.close(); } } | PropertyFileLoader { public void loadProvidedPropertyFile(String fileName, Properties topologyProperties) throws IOException { File file = new File(fileName); FileInputStream fileInput = new FileInputStream(file); topologyProperties.load(fileInput); fileInput.close(); } static void loadPropertyFile(String defaultPrope... | PropertyFileLoader { public void loadProvidedPropertyFile(String fileName, Properties topologyProperties) throws IOException { File file = new File(fileName); FileInputStream fileInput = new FileInputStream(file); topologyProperties.load(fileInput); fileInput.close(); } static void loadPropertyFile(String defaultPrope... |
@Test(expected = FileNotFoundException.class) public void testLoadingNonExistedDefaultFile() throws FileNotFoundException, IOException { reader.loadDefaultPropertyFile("NON_EXISTED_FILE", topologyProperties); } | public void loadDefaultPropertyFile(String defaultPropertyFile, Properties topologyProperties) throws IOException { InputStream propertiesInputStream = Thread.currentThread() .getContextClassLoader().getResourceAsStream(defaultPropertyFile); if (propertiesInputStream == null) throw new FileNotFoundException(); topology... | PropertyFileLoader { public void loadDefaultPropertyFile(String defaultPropertyFile, Properties topologyProperties) throws IOException { InputStream propertiesInputStream = Thread.currentThread() .getContextClassLoader().getResourceAsStream(defaultPropertyFile); if (propertiesInputStream == null) throw new FileNotFound... | PropertyFileLoader { public void loadDefaultPropertyFile(String defaultPropertyFile, Properties topologyProperties) throws IOException { InputStream propertiesInputStream = Thread.currentThread() .getContextClassLoader().getResourceAsStream(defaultPropertyFile); if (propertiesInputStream == null) throw new FileNotFound... | PropertyFileLoader { public void loadDefaultPropertyFile(String defaultPropertyFile, Properties topologyProperties) throws IOException { InputStream propertiesInputStream = Thread.currentThread() .getContextClassLoader().getResourceAsStream(defaultPropertyFile); if (propertiesInputStream == null) throw new FileNotFound... | PropertyFileLoader { public void loadDefaultPropertyFile(String defaultPropertyFile, Properties topologyProperties) throws IOException { InputStream propertiesInputStream = Thread.currentThread() .getContextClassLoader().getResourceAsStream(defaultPropertyFile); if (propertiesInputStream == null) throw new FileNotFound... |
@Test public void testCreateZipFolderPath() { Date date = new Date(); DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss-ssss"); String expectedFolderName = ECLOUD_SUFFIX + "-" + dateFormat.format(date); String folderPath = FileUtil.createZipFolderPath(date); String extension = FilenameUtils.getExtension... | public static String createZipFolderPath(Date date) { String folderName = generateFolderName(date); return System.getProperty("user.dir") + "/" + folderName + ZIP_FORMAT_EXTENSION; } | FileUtil { public static String createZipFolderPath(Date date) { String folderName = generateFolderName(date); return System.getProperty("user.dir") + "/" + folderName + ZIP_FORMAT_EXTENSION; } } | FileUtil { public static String createZipFolderPath(Date date) { String folderName = generateFolderName(date); return System.getProperty("user.dir") + "/" + folderName + ZIP_FORMAT_EXTENSION; } } | FileUtil { public static String createZipFolderPath(Date date) { String folderName = generateFolderName(date); return System.getProperty("user.dir") + "/" + folderName + ZIP_FORMAT_EXTENSION; } static void persistStreamToFile(InputStream inputStream, String folderPath, String fileName, String extension); static String... | FileUtil { public static String createZipFolderPath(Date date) { String folderName = generateFolderName(date); return System.getProperty("user.dir") + "/" + folderName + ZIP_FORMAT_EXTENSION; } static void persistStreamToFile(InputStream inputStream, String folderPath, String fileName, String extension); static String... |
@Test(expected = FileNotFoundException.class) public void testLoadingNonExistedProvidedFile() throws FileNotFoundException, IOException { reader.loadProvidedPropertyFile("NON_EXISTED_FILE", topologyProperties); } | public void loadProvidedPropertyFile(String fileName, Properties topologyProperties) throws IOException { File file = new File(fileName); FileInputStream fileInput = new FileInputStream(file); topologyProperties.load(fileInput); fileInput.close(); } | PropertyFileLoader { public void loadProvidedPropertyFile(String fileName, Properties topologyProperties) throws IOException { File file = new File(fileName); FileInputStream fileInput = new FileInputStream(file); topologyProperties.load(fileInput); fileInput.close(); } } | PropertyFileLoader { public void loadProvidedPropertyFile(String fileName, Properties topologyProperties) throws IOException { File file = new File(fileName); FileInputStream fileInput = new FileInputStream(file); topologyProperties.load(fileInput); fileInput.close(); } } | PropertyFileLoader { public void loadProvidedPropertyFile(String fileName, Properties topologyProperties) throws IOException { File file = new File(fileName); FileInputStream fileInput = new FileInputStream(file); topologyProperties.load(fileInput); fileInput.close(); } static void loadPropertyFile(String defaultPrope... | PropertyFileLoader { public void loadProvidedPropertyFile(String fileName, Properties topologyProperties) throws IOException { File file = new File(fileName); FileInputStream fileInput = new FileInputStream(file); topologyProperties.load(fileInput); fileInput.close(); } static void loadPropertyFile(String defaultPrope... |
@Test public void testLoadingFileWhenProvidedPropertyFileNotExisted() throws FileNotFoundException, IOException { PropertyFileLoader.loadPropertyFile(DEFAULT_PROPERTIES_FILE, "NON_EXISTED_PROVIDED_FILE", topologyProperties); assertNotNull(topologyProperties); assertFalse(topologyProperties.isEmpty()); for (final Map.En... | public static void loadPropertyFile(String defaultPropertyFile, String providedPropertyFile, Properties topologyProperties) { try { PropertyFileLoader reader = new PropertyFileLoader(); reader.loadDefaultPropertyFile(defaultPropertyFile, topologyProperties); if (!"".equals(providedPropertyFile)) { reader.loadProvidedPr... | PropertyFileLoader { public static void loadPropertyFile(String defaultPropertyFile, String providedPropertyFile, Properties topologyProperties) { try { PropertyFileLoader reader = new PropertyFileLoader(); reader.loadDefaultPropertyFile(defaultPropertyFile, topologyProperties); if (!"".equals(providedPropertyFile)) { ... | PropertyFileLoader { public static void loadPropertyFile(String defaultPropertyFile, String providedPropertyFile, Properties topologyProperties) { try { PropertyFileLoader reader = new PropertyFileLoader(); reader.loadDefaultPropertyFile(defaultPropertyFile, topologyProperties); if (!"".equals(providedPropertyFile)) { ... | PropertyFileLoader { public static void loadPropertyFile(String defaultPropertyFile, String providedPropertyFile, Properties topologyProperties) { try { PropertyFileLoader reader = new PropertyFileLoader(); reader.loadDefaultPropertyFile(defaultPropertyFile, topologyProperties); if (!"".equals(providedPropertyFile)) { ... | PropertyFileLoader { public static void loadPropertyFile(String defaultPropertyFile, String providedPropertyFile, Properties topologyProperties) { try { PropertyFileLoader reader = new PropertyFileLoader(); reader.loadDefaultPropertyFile(defaultPropertyFile, topologyProperties); if (!"".equals(providedPropertyFile)) { ... |
@Test public void testLoadingFileWhenDefaultFileNotExists() throws FileNotFoundException, IOException { PropertyFileLoader.loadPropertyFile("NON_EXISTED_DEFAULT_FILE", PROVIDED_PROPERTIES_FILE, topologyProperties); assertTrue(topologyProperties.isEmpty()); } | public static void loadPropertyFile(String defaultPropertyFile, String providedPropertyFile, Properties topologyProperties) { try { PropertyFileLoader reader = new PropertyFileLoader(); reader.loadDefaultPropertyFile(defaultPropertyFile, topologyProperties); if (!"".equals(providedPropertyFile)) { reader.loadProvidedPr... | PropertyFileLoader { public static void loadPropertyFile(String defaultPropertyFile, String providedPropertyFile, Properties topologyProperties) { try { PropertyFileLoader reader = new PropertyFileLoader(); reader.loadDefaultPropertyFile(defaultPropertyFile, topologyProperties); if (!"".equals(providedPropertyFile)) { ... | PropertyFileLoader { public static void loadPropertyFile(String defaultPropertyFile, String providedPropertyFile, Properties topologyProperties) { try { PropertyFileLoader reader = new PropertyFileLoader(); reader.loadDefaultPropertyFile(defaultPropertyFile, topologyProperties); if (!"".equals(providedPropertyFile)) { ... | PropertyFileLoader { public static void loadPropertyFile(String defaultPropertyFile, String providedPropertyFile, Properties topologyProperties) { try { PropertyFileLoader reader = new PropertyFileLoader(); reader.loadDefaultPropertyFile(defaultPropertyFile, topologyProperties); if (!"".equals(providedPropertyFile)) { ... | PropertyFileLoader { public static void loadPropertyFile(String defaultPropertyFile, String providedPropertyFile, Properties topologyProperties) { try { PropertyFileLoader reader = new PropertyFileLoader(); reader.loadDefaultPropertyFile(defaultPropertyFile, topologyProperties); if (!"".equals(providedPropertyFile)) { ... |
@Test public void getTaskStatisticsReport() { List<NodeStatistics> stats = prepareStats(); Mockito.when(cassandraNodeStatisticsDAO.getNodeStatistics(TASK_ID)).thenReturn(stats); Mockito.when(cassandraNodeStatisticsDAO.getStatisticsReport(TASK_ID)).thenReturn(null); StatisticsReport actual = cassandraStatisticsService.g... | @Override public StatisticsReport getTaskStatisticsReport(long taskId) { StatisticsReport report = cassandraNodeStatisticsDAO.getStatisticsReport(taskId); if (report == null) { List<NodeStatistics> nodeStatistics = cassandraNodeStatisticsDAO.getNodeStatistics(taskId); if (nodeStatistics == null || nodeStatistics.isEmpt... | CassandraValidationStatisticsService implements ValidationStatisticsReportService { @Override public StatisticsReport getTaskStatisticsReport(long taskId) { StatisticsReport report = cassandraNodeStatisticsDAO.getStatisticsReport(taskId); if (report == null) { List<NodeStatistics> nodeStatistics = cassandraNodeStatisti... | CassandraValidationStatisticsService implements ValidationStatisticsReportService { @Override public StatisticsReport getTaskStatisticsReport(long taskId) { StatisticsReport report = cassandraNodeStatisticsDAO.getStatisticsReport(taskId); if (report == null) { List<NodeStatistics> nodeStatistics = cassandraNodeStatisti... | CassandraValidationStatisticsService implements ValidationStatisticsReportService { @Override public StatisticsReport getTaskStatisticsReport(long taskId) { StatisticsReport report = cassandraNodeStatisticsDAO.getStatisticsReport(taskId); if (report == null) { List<NodeStatistics> nodeStatistics = cassandraNodeStatisti... | CassandraValidationStatisticsService implements ValidationStatisticsReportService { @Override public StatisticsReport getTaskStatisticsReport(long taskId) { StatisticsReport report = cassandraNodeStatisticsDAO.getStatisticsReport(taskId); if (report == null) { List<NodeStatistics> nodeStatistics = cassandraNodeStatisti... |
@Test public void getStoredTaskStatisticsReport() { StatisticsReport report = new StatisticsReport(TASK_ID, prepareStats()); Mockito.when(cassandraNodeStatisticsDAO.getStatisticsReport(TASK_ID)).thenReturn(report); StatisticsReport actual = cassandraStatisticsService.getTaskStatisticsReport(TASK_ID); Mockito.verify(cas... | @Override public StatisticsReport getTaskStatisticsReport(long taskId) { StatisticsReport report = cassandraNodeStatisticsDAO.getStatisticsReport(taskId); if (report == null) { List<NodeStatistics> nodeStatistics = cassandraNodeStatisticsDAO.getNodeStatistics(taskId); if (nodeStatistics == null || nodeStatistics.isEmpt... | CassandraValidationStatisticsService implements ValidationStatisticsReportService { @Override public StatisticsReport getTaskStatisticsReport(long taskId) { StatisticsReport report = cassandraNodeStatisticsDAO.getStatisticsReport(taskId); if (report == null) { List<NodeStatistics> nodeStatistics = cassandraNodeStatisti... | CassandraValidationStatisticsService implements ValidationStatisticsReportService { @Override public StatisticsReport getTaskStatisticsReport(long taskId) { StatisticsReport report = cassandraNodeStatisticsDAO.getStatisticsReport(taskId); if (report == null) { List<NodeStatistics> nodeStatistics = cassandraNodeStatisti... | CassandraValidationStatisticsService implements ValidationStatisticsReportService { @Override public StatisticsReport getTaskStatisticsReport(long taskId) { StatisticsReport report = cassandraNodeStatisticsDAO.getStatisticsReport(taskId); if (report == null) { List<NodeStatistics> nodeStatistics = cassandraNodeStatisti... | CassandraValidationStatisticsService implements ValidationStatisticsReportService { @Override public StatisticsReport getTaskStatisticsReport(long taskId) { StatisticsReport report = cassandraNodeStatisticsDAO.getStatisticsReport(taskId); if (report == null) { List<NodeStatistics> nodeStatistics = cassandraNodeStatisti... |
@Test public void testExecutionWithMultipleTasks() throws Exception { when(taskInfoDAO.hasKillFlag(TASK_ID)).thenReturn(false, false, false, true, true); when(taskInfoDAO.hasKillFlag(TASK_ID2)).thenReturn(false, false, true); boolean task1killedFlag = false; boolean task2killedFlag = false; for (int i = 0; i < 8; i++) ... | public boolean hasKillFlag(long taskId) { try { return cache.get(taskId); } catch (ExecutionException e) { LOGGER.info(e.getMessage()); return false; } } | TaskStatusChecker { public boolean hasKillFlag(long taskId) { try { return cache.get(taskId); } catch (ExecutionException e) { LOGGER.info(e.getMessage()); return false; } } } | TaskStatusChecker { public boolean hasKillFlag(long taskId) { try { return cache.get(taskId); } catch (ExecutionException e) { LOGGER.info(e.getMessage()); return false; } } private TaskStatusChecker(CassandraConnectionProvider cassandraConnectionProvider); TaskStatusChecker(CassandraTaskInfoDAO taskDAO); } | TaskStatusChecker { public boolean hasKillFlag(long taskId) { try { return cache.get(taskId); } catch (ExecutionException e) { LOGGER.info(e.getMessage()); return false; } } private TaskStatusChecker(CassandraConnectionProvider cassandraConnectionProvider); TaskStatusChecker(CassandraTaskInfoDAO taskDAO); static sync... | TaskStatusChecker { public boolean hasKillFlag(long taskId) { try { return cache.get(taskId); } catch (ExecutionException e) { LOGGER.info(e.getMessage()); return false; } } private TaskStatusChecker(CassandraConnectionProvider cassandraConnectionProvider); TaskStatusChecker(CassandraTaskInfoDAO taskDAO); static sync... |
@Test public void parameterIsProvidedTest() { stormTaskTuple.addParameter(PluginParameterKeys.MIME_TYPE, MIME_TYPE); assertTrue(TaskTupleUtility.isProvidedAsParameter(stormTaskTuple, PluginParameterKeys.MIME_TYPE)); } | protected static boolean isProvidedAsParameter(StormTaskTuple stormTaskTuple, String parameter) { if (stormTaskTuple.getParameter(parameter) != null) { return true; } else { return false; } } | TaskTupleUtility { protected static boolean isProvidedAsParameter(StormTaskTuple stormTaskTuple, String parameter) { if (stormTaskTuple.getParameter(parameter) != null) { return true; } else { return false; } } } | TaskTupleUtility { protected static boolean isProvidedAsParameter(StormTaskTuple stormTaskTuple, String parameter) { if (stormTaskTuple.getParameter(parameter) != null) { return true; } else { return false; } } } | TaskTupleUtility { protected static boolean isProvidedAsParameter(StormTaskTuple stormTaskTuple, String parameter) { if (stormTaskTuple.getParameter(parameter) != null) { return true; } else { return false; } } static String getParameterFromTuple(StormTaskTuple stormTaskTuple, String parameter); } | TaskTupleUtility { protected static boolean isProvidedAsParameter(StormTaskTuple stormTaskTuple, String parameter) { if (stormTaskTuple.getParameter(parameter) != null) { return true; } else { return false; } } static String getParameterFromTuple(StormTaskTuple stormTaskTuple, String parameter); } |
@Test public void parameterIsNotProvidedTest() { assertFalse(TaskTupleUtility.isProvidedAsParameter(stormTaskTuple, PluginParameterKeys.MIME_TYPE)); } | protected static boolean isProvidedAsParameter(StormTaskTuple stormTaskTuple, String parameter) { if (stormTaskTuple.getParameter(parameter) != null) { return true; } else { return false; } } | TaskTupleUtility { protected static boolean isProvidedAsParameter(StormTaskTuple stormTaskTuple, String parameter) { if (stormTaskTuple.getParameter(parameter) != null) { return true; } else { return false; } } } | TaskTupleUtility { protected static boolean isProvidedAsParameter(StormTaskTuple stormTaskTuple, String parameter) { if (stormTaskTuple.getParameter(parameter) != null) { return true; } else { return false; } } } | TaskTupleUtility { protected static boolean isProvidedAsParameter(StormTaskTuple stormTaskTuple, String parameter) { if (stormTaskTuple.getParameter(parameter) != null) { return true; } else { return false; } } static String getParameterFromTuple(StormTaskTuple stormTaskTuple, String parameter); } | TaskTupleUtility { protected static boolean isProvidedAsParameter(StormTaskTuple stormTaskTuple, String parameter) { if (stormTaskTuple.getParameter(parameter) != null) { return true; } else { return false; } } static String getParameterFromTuple(StormTaskTuple stormTaskTuple, String parameter); } |
@Test public void getDefaultValueTest() { assertEquals(TaskTupleUtility.getParameterFromTuple(stormTaskTuple, PluginParameterKeys.MIME_TYPE), PluginParameterKeys.PLUGIN_PARAMETERS.get(PluginParameterKeys.MIME_TYPE)); } | public static String getParameterFromTuple(StormTaskTuple stormTaskTuple, String parameter) { String outputValue = PluginParameterKeys.PLUGIN_PARAMETERS.get(parameter); if (isProvidedAsParameter(stormTaskTuple, parameter)) { outputValue = stormTaskTuple.getParameter(parameter); } return outputValue; } | TaskTupleUtility { public static String getParameterFromTuple(StormTaskTuple stormTaskTuple, String parameter) { String outputValue = PluginParameterKeys.PLUGIN_PARAMETERS.get(parameter); if (isProvidedAsParameter(stormTaskTuple, parameter)) { outputValue = stormTaskTuple.getParameter(parameter); } return outputValue; ... | TaskTupleUtility { public static String getParameterFromTuple(StormTaskTuple stormTaskTuple, String parameter) { String outputValue = PluginParameterKeys.PLUGIN_PARAMETERS.get(parameter); if (isProvidedAsParameter(stormTaskTuple, parameter)) { outputValue = stormTaskTuple.getParameter(parameter); } return outputValue; ... | TaskTupleUtility { public static String getParameterFromTuple(StormTaskTuple stormTaskTuple, String parameter) { String outputValue = PluginParameterKeys.PLUGIN_PARAMETERS.get(parameter); if (isProvidedAsParameter(stormTaskTuple, parameter)) { outputValue = stormTaskTuple.getParameter(parameter); } return outputValue; ... | TaskTupleUtility { public static String getParameterFromTuple(StormTaskTuple stormTaskTuple, String parameter) { String outputValue = PluginParameterKeys.PLUGIN_PARAMETERS.get(parameter); if (isProvidedAsParameter(stormTaskTuple, parameter)) { outputValue = stormTaskTuple.getParameter(parameter); } return outputValue; ... |
@Test public void getProvidedValueTest() { stormTaskTuple.addParameter(PluginParameterKeys.MIME_TYPE, MIME_TYPE); assertEquals(TaskTupleUtility.getParameterFromTuple(stormTaskTuple, PluginParameterKeys.MIME_TYPE), MIME_TYPE); } | public static String getParameterFromTuple(StormTaskTuple stormTaskTuple, String parameter) { String outputValue = PluginParameterKeys.PLUGIN_PARAMETERS.get(parameter); if (isProvidedAsParameter(stormTaskTuple, parameter)) { outputValue = stormTaskTuple.getParameter(parameter); } return outputValue; } | TaskTupleUtility { public static String getParameterFromTuple(StormTaskTuple stormTaskTuple, String parameter) { String outputValue = PluginParameterKeys.PLUGIN_PARAMETERS.get(parameter); if (isProvidedAsParameter(stormTaskTuple, parameter)) { outputValue = stormTaskTuple.getParameter(parameter); } return outputValue; ... | TaskTupleUtility { public static String getParameterFromTuple(StormTaskTuple stormTaskTuple, String parameter) { String outputValue = PluginParameterKeys.PLUGIN_PARAMETERS.get(parameter); if (isProvidedAsParameter(stormTaskTuple, parameter)) { outputValue = stormTaskTuple.getParameter(parameter); } return outputValue; ... | TaskTupleUtility { public static String getParameterFromTuple(StormTaskTuple stormTaskTuple, String parameter) { String outputValue = PluginParameterKeys.PLUGIN_PARAMETERS.get(parameter); if (isProvidedAsParameter(stormTaskTuple, parameter)) { outputValue = stormTaskTuple.getParameter(parameter); } return outputValue; ... | TaskTupleUtility { public static String getParameterFromTuple(StormTaskTuple stormTaskTuple, String parameter) { String outputValue = PluginParameterKeys.PLUGIN_PARAMETERS.get(parameter); if (isProvidedAsParameter(stormTaskTuple, parameter)) { outputValue = stormTaskTuple.getParameter(parameter); } return outputValue; ... |
@Test(expected = ZipException.class) public void shouldThrowZipExceptionWhileCompressEmptyFolder() throws Exception { folderPath = FileUtil.createFolder(); File folder = new File(folderPath); assertTrue(folder.isDirectory()); zipFolderPath = FileUtil.createZipFolderPath(new Date()); FolderCompressor.compress(folderPath... | public static void compress(String folderPath, String zipFolderPath) throws ZipException { File folder = new File(folderPath); ZipUtil.pack(folder, new File(zipFolderPath)); } | FolderCompressor { public static void compress(String folderPath, String zipFolderPath) throws ZipException { File folder = new File(folderPath); ZipUtil.pack(folder, new File(zipFolderPath)); } } | FolderCompressor { public static void compress(String folderPath, String zipFolderPath) throws ZipException { File folder = new File(folderPath); ZipUtil.pack(folder, new File(zipFolderPath)); } } | FolderCompressor { public static void compress(String folderPath, String zipFolderPath) throws ZipException { File folder = new File(folderPath); ZipUtil.pack(folder, new File(zipFolderPath)); } static void compress(String folderPath, String zipFolderPath); } | FolderCompressor { public static void compress(String folderPath, String zipFolderPath) throws ZipException { File folder = new File(folderPath); ZipUtil.pack(folder, new File(zipFolderPath)); } static void compress(String folderPath, String zipFolderPath); } |
@Test public void repeatOnError3Times_callNoThrowsExceptions_validResult() throws Exception { when(call.call()).thenReturn(RESULT); String result = Retriever.retryOnError3Times(ERROR_MESSAGE, call); assertEquals(RESULT, result); } | public static void retryOnError3Times(String errorMessage, Runnable runnable) { retryOnError3Times(errorMessage,()->{ runnable.run(); return null; }); } | Retriever { public static void retryOnError3Times(String errorMessage, Runnable runnable) { retryOnError3Times(errorMessage,()->{ runnable.run(); return null; }); } } | Retriever { public static void retryOnError3Times(String errorMessage, Runnable runnable) { retryOnError3Times(errorMessage,()->{ runnable.run(); return null; }); } } | Retriever { public static void retryOnError3Times(String errorMessage, Runnable runnable) { retryOnError3Times(errorMessage,()->{ runnable.run(); return null; }); } static void retryOnError3Times(String errorMessage, Runnable runnable); static V retryOnError3Times(String errorMessage, Callable<V> callable); static voi... | Retriever { public static void retryOnError3Times(String errorMessage, Runnable runnable) { retryOnError3Times(errorMessage,()->{ runnable.run(); return null; }); } static void retryOnError3Times(String errorMessage, Runnable runnable); static V retryOnError3Times(String errorMessage, Callable<V> callable); static voi... |
@Test public void repeatOnError3Times_callNoThrowsExceptions_callInvokedOnce() throws Exception { when(call.call()).thenReturn(RESULT); String result = Retriever.retryOnError3Times(ERROR_MESSAGE, call); verify(call).call(); } | public static void retryOnError3Times(String errorMessage, Runnable runnable) { retryOnError3Times(errorMessage,()->{ runnable.run(); return null; }); } | Retriever { public static void retryOnError3Times(String errorMessage, Runnable runnable) { retryOnError3Times(errorMessage,()->{ runnable.run(); return null; }); } } | Retriever { public static void retryOnError3Times(String errorMessage, Runnable runnable) { retryOnError3Times(errorMessage,()->{ runnable.run(); return null; }); } } | Retriever { public static void retryOnError3Times(String errorMessage, Runnable runnable) { retryOnError3Times(errorMessage,()->{ runnable.run(); return null; }); } static void retryOnError3Times(String errorMessage, Runnable runnable); static V retryOnError3Times(String errorMessage, Callable<V> callable); static voi... | Retriever { public static void retryOnError3Times(String errorMessage, Runnable runnable) { retryOnError3Times(errorMessage,()->{ runnable.run(); return null; }); } static void retryOnError3Times(String errorMessage, Runnable runnable); static V retryOnError3Times(String errorMessage, Callable<V> callable); static voi... |
@Test(expected = IOException.class) public void repeatOnError3Times_callAlwaysThrowsExceptions_catchedException() throws Exception { when(call.call()).thenThrow(IOException.class); Retriever.retryOnError3Times(ERROR_MESSAGE, call); } | public static void retryOnError3Times(String errorMessage, Runnable runnable) { retryOnError3Times(errorMessage,()->{ runnable.run(); return null; }); } | Retriever { public static void retryOnError3Times(String errorMessage, Runnable runnable) { retryOnError3Times(errorMessage,()->{ runnable.run(); return null; }); } } | Retriever { public static void retryOnError3Times(String errorMessage, Runnable runnable) { retryOnError3Times(errorMessage,()->{ runnable.run(); return null; }); } } | Retriever { public static void retryOnError3Times(String errorMessage, Runnable runnable) { retryOnError3Times(errorMessage,()->{ runnable.run(); return null; }); } static void retryOnError3Times(String errorMessage, Runnable runnable); static V retryOnError3Times(String errorMessage, Callable<V> callable); static voi... | Retriever { public static void retryOnError3Times(String errorMessage, Runnable runnable) { retryOnError3Times(errorMessage,()->{ runnable.run(); return null; }); } static void retryOnError3Times(String errorMessage, Runnable runnable); static V retryOnError3Times(String errorMessage, Callable<V> callable); static voi... |
@Test public void repeatOnError3Times_callAlwaysThrowsExceptions_callInvoked3Times() throws Exception { when(call.call()).thenThrow(IOException.class); try { Retriever.<String,IOException>retryOnError3Times(ERROR_MESSAGE, call); } catch (IOException e) { e.printStackTrace(); } verify(call,times(4)).call(); } | public static void retryOnError3Times(String errorMessage, Runnable runnable) { retryOnError3Times(errorMessage,()->{ runnable.run(); return null; }); } | Retriever { public static void retryOnError3Times(String errorMessage, Runnable runnable) { retryOnError3Times(errorMessage,()->{ runnable.run(); return null; }); } } | Retriever { public static void retryOnError3Times(String errorMessage, Runnable runnable) { retryOnError3Times(errorMessage,()->{ runnable.run(); return null; }); } } | Retriever { public static void retryOnError3Times(String errorMessage, Runnable runnable) { retryOnError3Times(errorMessage,()->{ runnable.run(); return null; }); } static void retryOnError3Times(String errorMessage, Runnable runnable); static V retryOnError3Times(String errorMessage, Callable<V> callable); static voi... | Retriever { public static void retryOnError3Times(String errorMessage, Runnable runnable) { retryOnError3Times(errorMessage,()->{ runnable.run(); return null; }); } static void retryOnError3Times(String errorMessage, Runnable runnable); static V retryOnError3Times(String errorMessage, Callable<V> callable); static voi... |
@Test public void synchronizeShouldNotFailIfThereIsNoTask() { synchronizer.synchronizeTasksByTaskStateFromBasicInfo(TOPOLOGY_NAME, TOPICS); } | public void synchronizeTasksByTaskStateFromBasicInfo(String topologyName, Collection<String> availableTopics) { List<TaskInfo> tasksFromTaskByTaskStateTableList = tasksByStateDAO.listAllActiveTasksInTopology(topologyName); Map<Long, TaskInfo> tasksFromTaskByTaskStateTableMap = tasksFromTaskByTaskStateTableList.stream()... | TaskStatusSynchronizer { public void synchronizeTasksByTaskStateFromBasicInfo(String topologyName, Collection<String> availableTopics) { List<TaskInfo> tasksFromTaskByTaskStateTableList = tasksByStateDAO.listAllActiveTasksInTopology(topologyName); Map<Long, TaskInfo> tasksFromTaskByTaskStateTableMap = tasksFromTaskByTa... | TaskStatusSynchronizer { public void synchronizeTasksByTaskStateFromBasicInfo(String topologyName, Collection<String> availableTopics) { List<TaskInfo> tasksFromTaskByTaskStateTableList = tasksByStateDAO.listAllActiveTasksInTopology(topologyName); Map<Long, TaskInfo> tasksFromTaskByTaskStateTableMap = tasksFromTaskByTa... | TaskStatusSynchronizer { public void synchronizeTasksByTaskStateFromBasicInfo(String topologyName, Collection<String> availableTopics) { List<TaskInfo> tasksFromTaskByTaskStateTableList = tasksByStateDAO.listAllActiveTasksInTopology(topologyName); Map<Long, TaskInfo> tasksFromTaskByTaskStateTableMap = tasksFromTaskByTa... | TaskStatusSynchronizer { public void synchronizeTasksByTaskStateFromBasicInfo(String topologyName, Collection<String> availableTopics) { List<TaskInfo> tasksFromTaskByTaskStateTableList = tasksByStateDAO.listAllActiveTasksInTopology(topologyName); Map<Long, TaskInfo> tasksFromTaskByTaskStateTableMap = tasksFromTaskByTa... |
@Test public void synchronizedShouldRepairInconsistentData() { when(tasksByStateDAO.listAllActiveTasksInTopology(eq(TOPOLOGY_NAME))).thenReturn(Collections.singletonList(TASK_TOPIC_INFO_1)); when(taskInfoDAO.findByIds(eq(Collections.singleton(1L)))).thenReturn(Collections.singletonList(INFO_1_OF_UNSYNCED)); synchronize... | public void synchronizeTasksByTaskStateFromBasicInfo(String topologyName, Collection<String> availableTopics) { List<TaskInfo> tasksFromTaskByTaskStateTableList = tasksByStateDAO.listAllActiveTasksInTopology(topologyName); Map<Long, TaskInfo> tasksFromTaskByTaskStateTableMap = tasksFromTaskByTaskStateTableList.stream()... | TaskStatusSynchronizer { public void synchronizeTasksByTaskStateFromBasicInfo(String topologyName, Collection<String> availableTopics) { List<TaskInfo> tasksFromTaskByTaskStateTableList = tasksByStateDAO.listAllActiveTasksInTopology(topologyName); Map<Long, TaskInfo> tasksFromTaskByTaskStateTableMap = tasksFromTaskByTa... | TaskStatusSynchronizer { public void synchronizeTasksByTaskStateFromBasicInfo(String topologyName, Collection<String> availableTopics) { List<TaskInfo> tasksFromTaskByTaskStateTableList = tasksByStateDAO.listAllActiveTasksInTopology(topologyName); Map<Long, TaskInfo> tasksFromTaskByTaskStateTableMap = tasksFromTaskByTa... | TaskStatusSynchronizer { public void synchronizeTasksByTaskStateFromBasicInfo(String topologyName, Collection<String> availableTopics) { List<TaskInfo> tasksFromTaskByTaskStateTableList = tasksByStateDAO.listAllActiveTasksInTopology(topologyName); Map<Long, TaskInfo> tasksFromTaskByTaskStateTableMap = tasksFromTaskByTa... | TaskStatusSynchronizer { public void synchronizeTasksByTaskStateFromBasicInfo(String topologyName, Collection<String> availableTopics) { List<TaskInfo> tasksFromTaskByTaskStateTableList = tasksByStateDAO.listAllActiveTasksInTopology(topologyName); Map<Long, TaskInfo> tasksFromTaskByTaskStateTableMap = tasksFromTaskByTa... |
@Test public void synchronizedShouldNotTouchTasksWithConsistentData() { when(tasksByStateDAO.listAllActiveTasksInTopology(eq(TOPOLOGY_NAME))).thenReturn(Collections.singletonList(TASK_TOPIC_INFO_1)); when(taskInfoDAO.findByIds(eq(Collections.singleton(1L)))).thenReturn(Collections.singletonList(INFO_1)); synchronizer.s... | public void synchronizeTasksByTaskStateFromBasicInfo(String topologyName, Collection<String> availableTopics) { List<TaskInfo> tasksFromTaskByTaskStateTableList = tasksByStateDAO.listAllActiveTasksInTopology(topologyName); Map<Long, TaskInfo> tasksFromTaskByTaskStateTableMap = tasksFromTaskByTaskStateTableList.stream()... | TaskStatusSynchronizer { public void synchronizeTasksByTaskStateFromBasicInfo(String topologyName, Collection<String> availableTopics) { List<TaskInfo> tasksFromTaskByTaskStateTableList = tasksByStateDAO.listAllActiveTasksInTopology(topologyName); Map<Long, TaskInfo> tasksFromTaskByTaskStateTableMap = tasksFromTaskByTa... | TaskStatusSynchronizer { public void synchronizeTasksByTaskStateFromBasicInfo(String topologyName, Collection<String> availableTopics) { List<TaskInfo> tasksFromTaskByTaskStateTableList = tasksByStateDAO.listAllActiveTasksInTopology(topologyName); Map<Long, TaskInfo> tasksFromTaskByTaskStateTableMap = tasksFromTaskByTa... | TaskStatusSynchronizer { public void synchronizeTasksByTaskStateFromBasicInfo(String topologyName, Collection<String> availableTopics) { List<TaskInfo> tasksFromTaskByTaskStateTableList = tasksByStateDAO.listAllActiveTasksInTopology(topologyName); Map<Long, TaskInfo> tasksFromTaskByTaskStateTableMap = tasksFromTaskByTa... | TaskStatusSynchronizer { public void synchronizeTasksByTaskStateFromBasicInfo(String topologyName, Collection<String> availableTopics) { List<TaskInfo> tasksFromTaskByTaskStateTableList = tasksByStateDAO.listAllActiveTasksInTopology(topologyName); Map<Long, TaskInfo> tasksFromTaskByTaskStateTableMap = tasksFromTaskByTa... |
@Test public void synchronizedShouldOnlyConcernTasksWithTopicReservedForTopology() { when(tasksByStateDAO.listAllActiveTasksInTopology(eq(TOPOLOGY_NAME))).thenReturn(Collections.singletonList(TASK_TOPIC_INFO_1_UNKNOWN_TOPIC)); synchronizer.synchronizeTasksByTaskStateFromBasicInfo(TOPOLOGY_NAME, TOPICS); verify(taskInfo... | public void synchronizeTasksByTaskStateFromBasicInfo(String topologyName, Collection<String> availableTopics) { List<TaskInfo> tasksFromTaskByTaskStateTableList = tasksByStateDAO.listAllActiveTasksInTopology(topologyName); Map<Long, TaskInfo> tasksFromTaskByTaskStateTableMap = tasksFromTaskByTaskStateTableList.stream()... | TaskStatusSynchronizer { public void synchronizeTasksByTaskStateFromBasicInfo(String topologyName, Collection<String> availableTopics) { List<TaskInfo> tasksFromTaskByTaskStateTableList = tasksByStateDAO.listAllActiveTasksInTopology(topologyName); Map<Long, TaskInfo> tasksFromTaskByTaskStateTableMap = tasksFromTaskByTa... | TaskStatusSynchronizer { public void synchronizeTasksByTaskStateFromBasicInfo(String topologyName, Collection<String> availableTopics) { List<TaskInfo> tasksFromTaskByTaskStateTableList = tasksByStateDAO.listAllActiveTasksInTopology(topologyName); Map<Long, TaskInfo> tasksFromTaskByTaskStateTableMap = tasksFromTaskByTa... | TaskStatusSynchronizer { public void synchronizeTasksByTaskStateFromBasicInfo(String topologyName, Collection<String> availableTopics) { List<TaskInfo> tasksFromTaskByTaskStateTableList = tasksByStateDAO.listAllActiveTasksInTopology(topologyName); Map<Long, TaskInfo> tasksFromTaskByTaskStateTableMap = tasksFromTaskByTa... | TaskStatusSynchronizer { public void synchronizeTasksByTaskStateFromBasicInfo(String topologyName, Collection<String> availableTopics) { List<TaskInfo> tasksFromTaskByTaskStateTableList = tasksByStateDAO.listAllActiveTasksInTopology(topologyName); Map<Long, TaskInfo> tasksFromTaskByTaskStateTableMap = tasksFromTaskByTa... |
@Test public void testUpdateBasicInfoStateWithStartDateAndInfo() throws Exception { long taskId = 1; int containsElements = 1; int expectedSize = 1; String topologyName = null; TaskState taskState = TaskState.CURRENTLY_PROCESSING; String taskInfo = ""; Date startTime = new Date(); TaskInfo expectedTaskInfo = createTask... | @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTask... | NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId... | NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId... | NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId... | NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId... |
@Test public void testUpdateBasicInfoStateWithFinishDateAndInfo() throws Exception { long taskId = 1; int containsElements = 1; int expectedSize = 1; String topologyName = null; TaskState taskState = TaskState.CURRENTLY_PROCESSING; String taskInfo = ""; Date finishDate = new Date(); TaskInfo expectedTaskInfo = createTa... | @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTask... | NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId... | NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId... | NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId... | NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId... |
@Test public void shouldSuccessfullyCompressFolder() throws Exception { folderPath = FileUtil.createFolder(); File folder = new File(folderPath); assertTrue(folder.isDirectory()); InputStream inputStream = IOUtils.toInputStream("some test data for my input stream"); createFile(inputStream, folderPath + "fileName"); zip... | public static void compress(String folderPath, String zipFolderPath) throws ZipException { File folder = new File(folderPath); ZipUtil.pack(folder, new File(zipFolderPath)); } | FolderCompressor { public static void compress(String folderPath, String zipFolderPath) throws ZipException { File folder = new File(folderPath); ZipUtil.pack(folder, new File(zipFolderPath)); } } | FolderCompressor { public static void compress(String folderPath, String zipFolderPath) throws ZipException { File folder = new File(folderPath); ZipUtil.pack(folder, new File(zipFolderPath)); } } | FolderCompressor { public static void compress(String folderPath, String zipFolderPath) throws ZipException { File folder = new File(folderPath); ZipUtil.pack(folder, new File(zipFolderPath)); } static void compress(String folderPath, String zipFolderPath); } | FolderCompressor { public static void compress(String folderPath, String zipFolderPath) throws ZipException { File folder = new File(folderPath); ZipUtil.pack(folder, new File(zipFolderPath)); } static void compress(String folderPath, String zipFolderPath); } |
@Test public void verifyOnlyOneNotificationForRepeatedRecord() throws Exception { long taskId = 1; taskInfoDAO.insert(taskId, null, 10, 0, TaskState.CURRENTLY_PROCESSING.toString(), "", null, null, null, 0, null); Tuple tuple = createNotificationTuple(taskId, RecordState.SUCCESS); testedBolt.execute(tuple); testedBolt.... | @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTask... | NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId... | NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId... | NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId... | NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId... |
@Test public void testSuccessfulNotificationFor101Tuples() throws Exception { long taskId = 1; int expectedSize = 101; String topologyName = null; TaskState taskState = TaskState.CURRENTLY_PROCESSING; String taskInfo = ""; taskInfoDAO.insert(taskId, topologyName, expectedSize, 0, taskState.toString(), taskInfo, null, n... | @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTask... | NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId... | NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId... | NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId... | NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId... |
@Test public void testSuccessfulProgressUpdateAfterBoltRecreate() throws Exception { long taskId = 1; int expectedSize =4; String topologyName = ""; TaskState taskState = TaskState.CURRENTLY_PROCESSING; String taskInfo = ""; taskInfoDAO.insert(taskId, topologyName, expectedSize, 0, taskState.toString(), taskInfo, null,... | @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTask... | NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId... | NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId... | NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId... | NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId... |
@Test public void testValidNotificationAfterBoltRecreate() throws Exception { long taskId = 1; int expectedSize = 2; String topologyName = null; TaskState taskState = TaskState.CURRENTLY_PROCESSING; String taskInfo = ""; taskInfoDAO.insert(taskId, topologyName, 2, 0, taskState.toString(), taskInfo, null, null, null, 0,... | @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTask... | NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId... | NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId... | NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId... | NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId... |
@Test public void testValidErrorReportDataAfterBoltRecreate() throws Exception { long taskId = 1; String topologyName = null; TaskState taskState = TaskState.CURRENTLY_PROCESSING; String taskInfo = ""; taskInfoDAO.insert(taskId, topologyName, 2, 0, taskState.toString(), taskInfo, null, null, null, 0, null); final Tuple... | @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTask... | NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId... | NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId... | NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId... | NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId... |
@Test public void testNotificationProgressPercentage() throws Exception { CassandraReportService cassandraReportService = new CassandraReportService(HOST, PORT, KEYSPACE, "", ""); long taskId = 1; int expectedSize = 330; int errors = 5; int middle = (int) (Math.random() * expectedSize); String topologyName = ""; TaskSt... | @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTask... | NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId... | NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId... | NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId... | NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId... |
@Test public void testNotificationForErrors() throws Exception { CassandraReportService cassandraReportService = new CassandraReportService(HOST, PORT, KEYSPACE, "", ""); long taskId = 1; int expectedSize = 20; int errors = 9; String topologyName = null; TaskState taskState = TaskState.CURRENTLY_PROCESSING; String task... | @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTask... | NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId... | NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId... | NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId... | NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId... |
@Test public void executeMcsBasedTask_taskIsNotKilled_verifyUpdateTaskInfoInCassandra() { task.addDataEntry(InputDataType.FILE_URLS, Collections.singletonList(FILE_URL_1)); submiter.execute(submitParameters); verify(taskStatusUpdater).updateStatusExpectedSize(eq(TASK_ID), eq(String.valueOf(TaskState.QUEUED)),eq(1)); } | public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(... | MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); ch... | MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); ch... | MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); ch... | MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); ch... |
@Test public void executeMcsBasedTask_oneFileUrl() { task.addDataEntry(InputDataType.FILE_URLS, Collections.singletonList(FILE_URL_1)); submiter.execute(submitParameters); verifyValidTaskSent(FILE_URL_1); } | public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(... | MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); ch... | MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); ch... | MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); ch... | MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); ch... |
@Test public void executeMcsBasedTask_threeFileUrls() { task.addDataEntry(InputDataType.FILE_URLS, Arrays.asList(FILE_URL_1,FILE_URL_2,FILE_URL_3)); submiter.execute(submitParameters); verifyValidTaskSent(FILE_URL_1,FILE_URL_2,FILE_URL_3); } | public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(... | MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); ch... | MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); ch... | MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); ch... | MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); ch... |
@Test public void shouldSuccessfullyDownloadTwoRecords() throws Exception { Representation representation = prepareRepresentation(); inputStream = IOUtils.toInputStream("some test data for my input stream"); inputStream2 = IOUtils.toInputStream("some test data for my input stream"); when(dataSetServiceClient.getReprese... | public final String downloadFilesFromDataSet(String providerId, String datasetName, String representationName, int threadsCount) throws InterruptedException, ExecutionException, IOException, DriverException,MimeTypeException, RepresentationNotFoundException { ExecutorService executorService = Executors.newFixedThreadPo... | RecordDownloader { public final String downloadFilesFromDataSet(String providerId, String datasetName, String representationName, int threadsCount) throws InterruptedException, ExecutionException, IOException, DriverException,MimeTypeException, RepresentationNotFoundException { ExecutorService executorService = Executo... | RecordDownloader { public final String downloadFilesFromDataSet(String providerId, String datasetName, String representationName, int threadsCount) throws InterruptedException, ExecutionException, IOException, DriverException,MimeTypeException, RepresentationNotFoundException { ExecutorService executorService = Executo... | RecordDownloader { public final String downloadFilesFromDataSet(String providerId, String datasetName, String representationName, int threadsCount) throws InterruptedException, ExecutionException, IOException, DriverException,MimeTypeException, RepresentationNotFoundException { ExecutorService executorService = Executo... | RecordDownloader { public final String downloadFilesFromDataSet(String providerId, String datasetName, String representationName, int threadsCount) throws InterruptedException, ExecutionException, IOException, DriverException,MimeTypeException, RepresentationNotFoundException { ExecutorService executorService = Executo... |
@Test public void executeMcsBasedTask_3000FileUrls() { List<String> fileUrls=new ArrayList<>(); for(int i =0;i<3000;i++) { fileUrls.add(FILE_URL_1); } task.addDataEntry(InputDataType.FILE_URLS, fileUrls); submiter.execute(submitParameters); verifyValidTaskSent(fileUrls.toArray(new String[0])); } | public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(... | MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); ch... | MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); ch... | MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); ch... | MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); ch... |
@Test public void executeMcsBasedTask_oneDatasetWithOneFile() { task.addDataEntry(InputDataType.DATASET_URLS, Collections.singletonList(DATASET_URL_1)); when(dataSetServiceClient.getRepresentationIterator(eq(DATASET_PROVIDER_1),eq(DATASET_ID_1))).thenReturn(representationIterator); when(representationIterator.hasNext()... | public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(... | MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); ch... | MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); ch... | MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); ch... | MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); ch... |
@Test public void executeMcsBasedTask_oneDatasetWithThreeFiles() { task.addDataEntry(InputDataType.DATASET_URLS, Collections.singletonList(DATASET_URL_1)); when(dataSetServiceClient.getRepresentationIterator(eq(DATASET_PROVIDER_1),eq(DATASET_ID_1))).thenReturn(representationIterator); when(representationIterator.hasNex... | public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(... | MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); ch... | MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); ch... | MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); ch... | MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); ch... |
@Test public void executeMcsBasedTask_oneLastRevisionWithOneFile() throws MCSException { task.addDataEntry(InputDataType.DATASET_URLS, Collections.singletonList(DATASET_URL_1)); task.addParameter(PluginParameterKeys.REVISION_NAME, REVISION_NAME); task.addParameter(PluginParameterKeys.REVISION_PROVIDER,REVISION_PROVIDER... | public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(... | MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); ch... | MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); ch... | MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); ch... | MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); ch... |
@Test public void executeMcsBasedTask_lastRevisionsForTwoObject_verifyTwoRecordsSentToKafka() throws MCSException { prepareInvocationForLastRevisionOfTwoObjects(); submiter.execute(submitParameters); verifyValidTaskSent(FILE_URL_1,FILE_URL_1); } | public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(... | MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); ch... | MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); ch... | MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); ch... | MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); ch... |
@Test public void executeMcsBasedTask_lastRevisionsForTwoObjectAndLimitTo1_verifyOnlyOneRecordSentToKafka() throws MCSException { prepareInvocationForLastRevisionOfTwoObjects(); task.addParameter(PluginParameterKeys.SAMPLE_SIZE,"1"); submiter.execute(submitParameters); verifyValidTaskSent(FILE_URL_1); } | public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(... | MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); ch... | MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); ch... | MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); ch... | MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); ch... |
@Test public void executeMcsBasedTask_lastRevisionsForThreeObjectsInThreeChunks_verifyThreeRecordsSentToKafka() throws MCSException { prepareInvocationForLastRevisionForThreeObjectsInThreeChunks(); submiter.execute(submitParameters); verifyValidTaskSent(FILE_URL_1,FILE_URL_1,FILE_URL_1); } | public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(... | MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); ch... | MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); ch... | MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); ch... | MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); ch... |
@Test public void executeMcsBasedTask_lastRevisionsForThreeObjectsInThreeChunks_verifyOnlyTwoRecordSentToKafka() throws MCSException { prepareInvocationForLastRevisionForThreeObjectsInThreeChunks(); task.addParameter(PluginParameterKeys.SAMPLE_SIZE,"2"); submiter.execute(submitParameters); verifyValidTaskSent(FILE_URL_... | public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(... | MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); ch... | MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); ch... | MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); ch... | MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); ch... |
@Test public void executeMcsBasedTask_oneRevisionForGivenTimestampWithOneFile() throws MCSException { task.addDataEntry(InputDataType.DATASET_URLS, Collections.singletonList(DATASET_URL_1)); task.addParameter(PluginParameterKeys.REVISION_NAME, REVISION_NAME); task.addParameter(PluginParameterKeys.REVISION_PROVIDER,REVI... | public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(... | MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); ch... | MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); ch... | MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); ch... | MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); ch... |
@Test public void shouldGetProgressReport() throws Exception { TaskInfo taskInfo = new TaskInfo(TASK_ID, TOPOLOGY_NAME, TaskState.PROCESSED, EMPTY_STRING, 100, 100, 10, 50, new Date(), new Date(), new Date()); when(reportService.getTaskProgress(eq(Long.toString(TASK_ID)))).thenReturn(taskInfo); when(topologyManager.con... | @GetMapping(value = "{taskId}/progress", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") public TaskInfo getTaskProgress( @PathVariable final String topologyName, @PathVariable final String taskId) throws AccessDeniedOrO... | TopologyTasksResource { @GetMapping(value = "{taskId}/progress", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") public TaskInfo getTaskProgress( @PathVariable final String topologyName, @PathVariable final String taskId... | TopologyTasksResource { @GetMapping(value = "{taskId}/progress", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") public TaskInfo getTaskProgress( @PathVariable final String topologyName, @PathVariable final String taskId... | TopologyTasksResource { @GetMapping(value = "{taskId}/progress", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") public TaskInfo getTaskProgress( @PathVariable final String topologyName, @PathVariable final String taskId... | TopologyTasksResource { @GetMapping(value = "{taskId}/progress", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") public TaskInfo getTaskProgress( @PathVariable final String topologyName, @PathVariable final String taskId... |
@Test(expected = RepresentationNotFoundException.class) public void shouldThrowRepresentationNotFoundException() throws Exception { when(dataSetServiceClient.getRepresentationIterator(anyString(), anyString())).thenReturn(representationIterator); when(representationIterator.hasNext()).thenReturn(false, false); recordDo... | public final String downloadFilesFromDataSet(String providerId, String datasetName, String representationName, int threadsCount) throws InterruptedException, ExecutionException, IOException, DriverException,MimeTypeException, RepresentationNotFoundException { ExecutorService executorService = Executors.newFixedThreadPo... | RecordDownloader { public final String downloadFilesFromDataSet(String providerId, String datasetName, String representationName, int threadsCount) throws InterruptedException, ExecutionException, IOException, DriverException,MimeTypeException, RepresentationNotFoundException { ExecutorService executorService = Executo... | RecordDownloader { public final String downloadFilesFromDataSet(String providerId, String datasetName, String representationName, int threadsCount) throws InterruptedException, ExecutionException, IOException, DriverException,MimeTypeException, RepresentationNotFoundException { ExecutorService executorService = Executo... | RecordDownloader { public final String downloadFilesFromDataSet(String providerId, String datasetName, String representationName, int threadsCount) throws InterruptedException, ExecutionException, IOException, DriverException,MimeTypeException, RepresentationNotFoundException { ExecutorService executorService = Executo... | RecordDownloader { public final String downloadFilesFromDataSet(String providerId, String datasetName, String representationName, int threadsCount) throws InterruptedException, ExecutionException, IOException, DriverException,MimeTypeException, RepresentationNotFoundException { ExecutorService executorService = Executo... |
@Test public void shouldThrowExceptionIfTaskIdWasNotFound() throws Exception { when(reportService.getTaskProgress(eq(Long.toString(TASK_ID)))).thenThrow(AccessDeniedOrObjectDoesNotExistException.class); when(topologyManager.containsTopology(TOPOLOGY_NAME)).thenReturn(true); ResultActions response = mockMvc.perform( get... | @GetMapping(value = "{taskId}/progress", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") public TaskInfo getTaskProgress( @PathVariable final String topologyName, @PathVariable final String taskId) throws AccessDeniedOrO... | TopologyTasksResource { @GetMapping(value = "{taskId}/progress", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") public TaskInfo getTaskProgress( @PathVariable final String topologyName, @PathVariable final String taskId... | TopologyTasksResource { @GetMapping(value = "{taskId}/progress", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") public TaskInfo getTaskProgress( @PathVariable final String topologyName, @PathVariable final String taskId... | TopologyTasksResource { @GetMapping(value = "{taskId}/progress", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") public TaskInfo getTaskProgress( @PathVariable final String topologyName, @PathVariable final String taskId... | TopologyTasksResource { @GetMapping(value = "{taskId}/progress", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") public TaskInfo getTaskProgress( @PathVariable final String topologyName, @PathVariable final String taskId... |
@Test public void shouldGetStatisticReport() throws Exception { when(validationStatisticsService.getTaskStatisticsReport(TASK_ID)).thenReturn(new StatisticsReport(TASK_ID, null)); when(topologyManager.containsTopology(anyString())).thenReturn(true); ResultActions response = mockMvc.perform(get(VALIDATION_STATISTICS_REP... | @GetMapping(path = "{taskId}/statistics", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") public StatisticsReport getTaskStatisticsReport( @PathVariable String topologyName, @PathVariable String taskId) throws AccessDeni... | ReportResource { @GetMapping(path = "{taskId}/statistics", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") public StatisticsReport getTaskStatisticsReport( @PathVariable String topologyName, @PathVariable String taskId) ... | ReportResource { @GetMapping(path = "{taskId}/statistics", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") public StatisticsReport getTaskStatisticsReport( @PathVariable String topologyName, @PathVariable String taskId) ... | ReportResource { @GetMapping(path = "{taskId}/statistics", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") public StatisticsReport getTaskStatisticsReport( @PathVariable String topologyName, @PathVariable String taskId) ... | ReportResource { @GetMapping(path = "{taskId}/statistics", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") public StatisticsReport getTaskStatisticsReport( @PathVariable String topologyName, @PathVariable String taskId) ... |
@Test public void shouldReturn405WhenStatisticsRequestedButTopologyNotFound() throws Exception { when(validationStatisticsService.getTaskStatisticsReport(TASK_ID)).thenReturn(new StatisticsReport(TASK_ID, null)); when(topologyManager.containsTopology(anyString())).thenReturn(false); ResultActions response = mockMvc.per... | @GetMapping(path = "{taskId}/statistics", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") public StatisticsReport getTaskStatisticsReport( @PathVariable String topologyName, @PathVariable String taskId) throws AccessDeni... | ReportResource { @GetMapping(path = "{taskId}/statistics", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") public StatisticsReport getTaskStatisticsReport( @PathVariable String topologyName, @PathVariable String taskId) ... | ReportResource { @GetMapping(path = "{taskId}/statistics", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") public StatisticsReport getTaskStatisticsReport( @PathVariable String topologyName, @PathVariable String taskId) ... | ReportResource { @GetMapping(path = "{taskId}/statistics", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") public StatisticsReport getTaskStatisticsReport( @PathVariable String topologyName, @PathVariable String taskId) ... | ReportResource { @GetMapping(path = "{taskId}/statistics", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") public StatisticsReport getTaskStatisticsReport( @PathVariable String topologyName, @PathVariable String taskId) ... |
@Test public void shouldProvideSubmitterForDepublicationTopology() { TaskSubmitter taskSubmitter = new TaskSubmitterFactory( Mockito.mock(OaiTopologyTaskSubmitter.class), Mockito.mock(HttpTopologyTaskSubmitter.class), Mockito.mock(OtherTopologiesTaskSubmitter.class), Mockito.mock(DepublicationTaskSubmitter.class) ).pro... | public TaskSubmitter provideTaskSubmitter(SubmitTaskParameters parameters) { switch (parameters.getTopologyName()) { case TopologiesNames.OAI_TOPOLOGY: return oaiTopologyTaskSubmitter; case TopologiesNames.HTTP_TOPOLOGY: return httpTopologyTaskSubmitter; case TopologiesNames.ENRICHMENT_TOPOLOGY: case TopologiesNames.IN... | TaskSubmitterFactory { public TaskSubmitter provideTaskSubmitter(SubmitTaskParameters parameters) { switch (parameters.getTopologyName()) { case TopologiesNames.OAI_TOPOLOGY: return oaiTopologyTaskSubmitter; case TopologiesNames.HTTP_TOPOLOGY: return httpTopologyTaskSubmitter; case TopologiesNames.ENRICHMENT_TOPOLOGY: ... | TaskSubmitterFactory { public TaskSubmitter provideTaskSubmitter(SubmitTaskParameters parameters) { switch (parameters.getTopologyName()) { case TopologiesNames.OAI_TOPOLOGY: return oaiTopologyTaskSubmitter; case TopologiesNames.HTTP_TOPOLOGY: return httpTopologyTaskSubmitter; case TopologiesNames.ENRICHMENT_TOPOLOGY: ... | TaskSubmitterFactory { public TaskSubmitter provideTaskSubmitter(SubmitTaskParameters parameters) { switch (parameters.getTopologyName()) { case TopologiesNames.OAI_TOPOLOGY: return oaiTopologyTaskSubmitter; case TopologiesNames.HTTP_TOPOLOGY: return httpTopologyTaskSubmitter; case TopologiesNames.ENRICHMENT_TOPOLOGY: ... | TaskSubmitterFactory { public TaskSubmitter provideTaskSubmitter(SubmitTaskParameters parameters) { switch (parameters.getTopologyName()) { case TopologiesNames.OAI_TOPOLOGY: return oaiTopologyTaskSubmitter; case TopologiesNames.HTTP_TOPOLOGY: return httpTopologyTaskSubmitter; case TopologiesNames.ENRICHMENT_TOPOLOGY: ... |
@Test public void shouldProvideSubmitterForOaiTopology() { TaskSubmitter taskSubmitter = new TaskSubmitterFactory( Mockito.mock(OaiTopologyTaskSubmitter.class), Mockito.mock(HttpTopologyTaskSubmitter.class), Mockito.mock(OtherTopologiesTaskSubmitter.class), Mockito.mock(DepublicationTaskSubmitter.class) ).provideTaskSu... | public TaskSubmitter provideTaskSubmitter(SubmitTaskParameters parameters) { switch (parameters.getTopologyName()) { case TopologiesNames.OAI_TOPOLOGY: return oaiTopologyTaskSubmitter; case TopologiesNames.HTTP_TOPOLOGY: return httpTopologyTaskSubmitter; case TopologiesNames.ENRICHMENT_TOPOLOGY: case TopologiesNames.IN... | TaskSubmitterFactory { public TaskSubmitter provideTaskSubmitter(SubmitTaskParameters parameters) { switch (parameters.getTopologyName()) { case TopologiesNames.OAI_TOPOLOGY: return oaiTopologyTaskSubmitter; case TopologiesNames.HTTP_TOPOLOGY: return httpTopologyTaskSubmitter; case TopologiesNames.ENRICHMENT_TOPOLOGY: ... | TaskSubmitterFactory { public TaskSubmitter provideTaskSubmitter(SubmitTaskParameters parameters) { switch (parameters.getTopologyName()) { case TopologiesNames.OAI_TOPOLOGY: return oaiTopologyTaskSubmitter; case TopologiesNames.HTTP_TOPOLOGY: return httpTopologyTaskSubmitter; case TopologiesNames.ENRICHMENT_TOPOLOGY: ... | TaskSubmitterFactory { public TaskSubmitter provideTaskSubmitter(SubmitTaskParameters parameters) { switch (parameters.getTopologyName()) { case TopologiesNames.OAI_TOPOLOGY: return oaiTopologyTaskSubmitter; case TopologiesNames.HTTP_TOPOLOGY: return httpTopologyTaskSubmitter; case TopologiesNames.ENRICHMENT_TOPOLOGY: ... | TaskSubmitterFactory { public TaskSubmitter provideTaskSubmitter(SubmitTaskParameters parameters) { switch (parameters.getTopologyName()) { case TopologiesNames.OAI_TOPOLOGY: return oaiTopologyTaskSubmitter; case TopologiesNames.HTTP_TOPOLOGY: return httpTopologyTaskSubmitter; case TopologiesNames.ENRICHMENT_TOPOLOGY: ... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.