target
stringlengths
20
113k
src_fm
stringlengths
11
86.3k
src_fm_fc
stringlengths
21
86.4k
src_fm_fc_co
stringlengths
30
86.4k
src_fm_fc_ms
stringlengths
42
86.8k
src_fm_fc_ms_ff
stringlengths
43
86.8k
@Test(expected = AccessDeniedOrObjectDoesNotExistException.class) @Betamax(tape = "records_shouldThrowAccessDeniedOrObjectDoesNotExistExceptionWhileTryingToRevokePermissions") public void shouldThrowAccessDeniedOrObjectDoesNotExistExceptionWhileTryingToRevokePermissions() throws MCSException, IOException { RecordServic...
public void revokePermissionsToVersion(String cloudId, String representationName, String version, String userName, Permission permission) throws MCSException { WebTarget target = client .target(baseUrl) .path(REPRESENTATION_PERMISSION) .resolveTemplate(CLOUD_ID, cloudId) .resolveTemplate(REPRESENTATION_NAME, representa...
RecordServiceClient extends MCSClient { public void revokePermissionsToVersion(String cloudId, String representationName, String version, String userName, Permission permission) throws MCSException { WebTarget target = client .target(baseUrl) .path(REPRESENTATION_PERMISSION) .resolveTemplate(CLOUD_ID, cloudId) .resolve...
RecordServiceClient extends MCSClient { public void revokePermissionsToVersion(String cloudId, String representationName, String version, String userName, Permission permission) throws MCSException { WebTarget target = client .target(baseUrl) .path(REPRESENTATION_PERMISSION) .resolveTemplate(CLOUD_ID, cloudId) .resolve...
RecordServiceClient extends MCSClient { public void revokePermissionsToVersion(String cloudId, String representationName, String version, String userName, Permission permission) throws MCSException { WebTarget target = client .target(baseUrl) .path(REPRESENTATION_PERMISSION) .resolveTemplate(CLOUD_ID, cloudId) .resolve...
RecordServiceClient extends MCSClient { public void revokePermissionsToVersion(String cloudId, String representationName, String version, String userName, Permission permission) throws MCSException { WebTarget target = client .target(baseUrl) .path(REPRESENTATION_PERMISSION) .resolveTemplate(CLOUD_ID, cloudId) .resolve...
@Test(expected = DriverException.class) @Betamax(tape = "records_shouldThrowDriverExceptionWhileMcsIsNotAvailable") public void shouldThrowMcsExceptionWhileMcsIsNotAvailable() throws MCSException { RecordServiceClient client = new RecordServiceClient("http: client.grantPermissionsToVersion(CLOUD_ID, REPRESENTATION_NAME...
public void grantPermissionsToVersion(String cloudId, String representationName, String version, String userName, Permission permission) throws MCSException { WebTarget target = client .target(baseUrl) .path(REPRESENTATION_PERMISSION) .resolveTemplate(CLOUD_ID, cloudId) .resolveTemplate(REPRESENTATION_NAME, representat...
RecordServiceClient extends MCSClient { public void grantPermissionsToVersion(String cloudId, String representationName, String version, String userName, Permission permission) throws MCSException { WebTarget target = client .target(baseUrl) .path(REPRESENTATION_PERMISSION) .resolveTemplate(CLOUD_ID, cloudId) .resolveT...
RecordServiceClient extends MCSClient { public void grantPermissionsToVersion(String cloudId, String representationName, String version, String userName, Permission permission) throws MCSException { WebTarget target = client .target(baseUrl) .path(REPRESENTATION_PERMISSION) .resolveTemplate(CLOUD_ID, cloudId) .resolveT...
RecordServiceClient extends MCSClient { public void grantPermissionsToVersion(String cloudId, String representationName, String version, String userName, Permission permission) throws MCSException { WebTarget target = client .target(baseUrl) .path(REPRESENTATION_PERMISSION) .resolveTemplate(CLOUD_ID, cloudId) .resolveT...
RecordServiceClient extends MCSClient { public void grantPermissionsToVersion(String cloudId, String representationName, String version, String userName, Permission permission) throws MCSException { WebTarget target = client .target(baseUrl) .path(REPRESENTATION_PERMISSION) .resolveTemplate(CLOUD_ID, cloudId) .resolveT...
@Test @Betamax(tape = "records_shouldCreateNewRepresentationAndUploadFile") public void shouldCreateNewRepresentationAndUploadAFile() throws IOException, FileNotFoundException, MCSException { RecordServiceClient client = new RecordServiceClient("http: InputStream stream = new ByteArrayInputStream("example File Content"...
public URI createRepresentation(String cloudId, String representationName, String providerId) throws MCSException { WebTarget target = client .target(baseUrl) .path(REPRESENTATION_RESOURCE) .resolveTemplate(CLOUD_ID, cloudId) .resolveTemplate(REPRESENTATION_NAME, representationName); Builder request = target.request();...
RecordServiceClient extends MCSClient { public URI createRepresentation(String cloudId, String representationName, String providerId) throws MCSException { WebTarget target = client .target(baseUrl) .path(REPRESENTATION_RESOURCE) .resolveTemplate(CLOUD_ID, cloudId) .resolveTemplate(REPRESENTATION_NAME, representationNa...
RecordServiceClient extends MCSClient { public URI createRepresentation(String cloudId, String representationName, String providerId) throws MCSException { WebTarget target = client .target(baseUrl) .path(REPRESENTATION_RESOURCE) .resolveTemplate(CLOUD_ID, cloudId) .resolveTemplate(REPRESENTATION_NAME, representationNa...
RecordServiceClient extends MCSClient { public URI createRepresentation(String cloudId, String representationName, String providerId) throws MCSException { WebTarget target = client .target(baseUrl) .path(REPRESENTATION_RESOURCE) .resolveTemplate(CLOUD_ID, cloudId) .resolveTemplate(REPRESENTATION_NAME, representationNa...
RecordServiceClient extends MCSClient { public URI createRepresentation(String cloudId, String representationName, String providerId) throws MCSException { WebTarget target = client .target(baseUrl) .path(REPRESENTATION_RESOURCE) .resolveTemplate(CLOUD_ID, cloudId) .resolveTemplate(REPRESENTATION_NAME, representationNa...
@Test @Betamax(tape = "records_shouldCreateNewRepresentationAndUploadFile") public void shouldCreateNewRepresentationAndUploadAFile_1() throws IOException, FileNotFoundException, MCSException { RecordServiceClient client = new RecordServiceClient("http: InputStream stream = new ByteArrayInputStream("example File Conten...
public URI createRepresentation(String cloudId, String representationName, String providerId) throws MCSException { WebTarget target = client .target(baseUrl) .path(REPRESENTATION_RESOURCE) .resolveTemplate(CLOUD_ID, cloudId) .resolveTemplate(REPRESENTATION_NAME, representationName); Builder request = target.request();...
RecordServiceClient extends MCSClient { public URI createRepresentation(String cloudId, String representationName, String providerId) throws MCSException { WebTarget target = client .target(baseUrl) .path(REPRESENTATION_RESOURCE) .resolveTemplate(CLOUD_ID, cloudId) .resolveTemplate(REPRESENTATION_NAME, representationNa...
RecordServiceClient extends MCSClient { public URI createRepresentation(String cloudId, String representationName, String providerId) throws MCSException { WebTarget target = client .target(baseUrl) .path(REPRESENTATION_RESOURCE) .resolveTemplate(CLOUD_ID, cloudId) .resolveTemplate(REPRESENTATION_NAME, representationNa...
RecordServiceClient extends MCSClient { public URI createRepresentation(String cloudId, String representationName, String providerId) throws MCSException { WebTarget target = client .target(baseUrl) .path(REPRESENTATION_RESOURCE) .resolveTemplate(CLOUD_ID, cloudId) .resolveTemplate(REPRESENTATION_NAME, representationNa...
RecordServiceClient extends MCSClient { public URI createRepresentation(String cloudId, String representationName, String providerId) throws MCSException { WebTarget target = client .target(baseUrl) .path(REPRESENTATION_RESOURCE) .resolveTemplate(CLOUD_ID, cloudId) .resolveTemplate(REPRESENTATION_NAME, representationNa...
@Betamax(tape = "records_shouldRetrieveRepresentationByRevision") @Test public void shouldRetrieveRepresentationRevision() throws MCSException { RecordServiceClient instance = new RecordServiceClient("http: List<Representation> representations = instance.getRepresentationsByRevision("Z6DX3RWCEFUUSGRUWP6QZWRIZKY7HI5Y7H4...
public List<Representation> getRepresentationsByRevision( String cloudId, String representationName, String revisionName, String revisionProviderId, String revisionTimestamp) throws MCSException { WebTarget webtarget = client .target(baseUrl) .path(REPRESENTATION_REVISIONS_RESOURCE) .resolveTemplate(CLOUD_ID, cloudId) ...
RecordServiceClient extends MCSClient { public List<Representation> getRepresentationsByRevision( String cloudId, String representationName, String revisionName, String revisionProviderId, String revisionTimestamp) throws MCSException { WebTarget webtarget = client .target(baseUrl) .path(REPRESENTATION_REVISIONS_RESOUR...
RecordServiceClient extends MCSClient { public List<Representation> getRepresentationsByRevision( String cloudId, String representationName, String revisionName, String revisionProviderId, String revisionTimestamp) throws MCSException { WebTarget webtarget = client .target(baseUrl) .path(REPRESENTATION_REVISIONS_RESOUR...
RecordServiceClient extends MCSClient { public List<Representation> getRepresentationsByRevision( String cloudId, String representationName, String revisionName, String revisionProviderId, String revisionTimestamp) throws MCSException { WebTarget webtarget = client .target(baseUrl) .path(REPRESENTATION_REVISIONS_RESOUR...
RecordServiceClient extends MCSClient { public List<Representation> getRepresentationsByRevision( String cloudId, String representationName, String revisionName, String revisionProviderId, String revisionTimestamp) throws MCSException { WebTarget webtarget = client .target(baseUrl) .path(REPRESENTATION_REVISIONS_RESOUR...
@Test public void testFindAclNotExisting() { AclObjectIdentity newAoi = new AclObjectIdentity(); newAoi.setId("invalid"); newAoi.setObjectClass(aoi_class); newAoi.setOwnerId(sid1); service.findAclObjectIdentity(newAoi); }
@Override public AclObjectIdentity findAclObjectIdentity(AclObjectIdentity objectId) { assertAclObjectIdentity(objectId); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN findAclObjectIdentity: objectIdentity: " + objectId); } Row row = session .execute(QueryBuilder.select().all().from(keyspace, AOI_TABLE) .where(QueryBuil...
CassandraAclRepository implements AclRepository { @Override public AclObjectIdentity findAclObjectIdentity(AclObjectIdentity objectId) { assertAclObjectIdentity(objectId); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN findAclObjectIdentity: objectIdentity: " + objectId); } Row row = session .execute(QueryBuilder.select(...
CassandraAclRepository implements AclRepository { @Override public AclObjectIdentity findAclObjectIdentity(AclObjectIdentity objectId) { assertAclObjectIdentity(objectId); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN findAclObjectIdentity: objectIdentity: " + objectId); } Row row = session .execute(QueryBuilder.select(...
CassandraAclRepository implements AclRepository { @Override public AclObjectIdentity findAclObjectIdentity(AclObjectIdentity objectId) { assertAclObjectIdentity(objectId); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN findAclObjectIdentity: objectIdentity: " + objectId); } Row row = session .execute(QueryBuilder.select(...
CassandraAclRepository implements AclRepository { @Override public AclObjectIdentity findAclObjectIdentity(AclObjectIdentity objectId) { assertAclObjectIdentity(objectId); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN findAclObjectIdentity: objectIdentity: " + objectId); } Row row = session .execute(QueryBuilder.select(...
@Betamax(tape = "records_shouldThrowRepresentationNotExist") @Test(expected = RepresentationNotExistsException.class) public void shouldThrowRepresentationNotExists() throws MCSException { RecordServiceClient instance = new RecordServiceClient("http: instance.getRepresentationsByRevision("Z6DX3RWCEFUUSGRUWP6QZWRIZKY7HI...
public List<Representation> getRepresentationsByRevision( String cloudId, String representationName, String revisionName, String revisionProviderId, String revisionTimestamp) throws MCSException { WebTarget webtarget = client .target(baseUrl) .path(REPRESENTATION_REVISIONS_RESOURCE) .resolveTemplate(CLOUD_ID, cloudId) ...
RecordServiceClient extends MCSClient { public List<Representation> getRepresentationsByRevision( String cloudId, String representationName, String revisionName, String revisionProviderId, String revisionTimestamp) throws MCSException { WebTarget webtarget = client .target(baseUrl) .path(REPRESENTATION_REVISIONS_RESOUR...
RecordServiceClient extends MCSClient { public List<Representation> getRepresentationsByRevision( String cloudId, String representationName, String revisionName, String revisionProviderId, String revisionTimestamp) throws MCSException { WebTarget webtarget = client .target(baseUrl) .path(REPRESENTATION_REVISIONS_RESOUR...
RecordServiceClient extends MCSClient { public List<Representation> getRepresentationsByRevision( String cloudId, String representationName, String revisionName, String revisionProviderId, String revisionTimestamp) throws MCSException { WebTarget webtarget = client .target(baseUrl) .path(REPRESENTATION_REVISIONS_RESOUR...
RecordServiceClient extends MCSClient { public List<Representation> getRepresentationsByRevision( String cloudId, String representationName, String revisionName, String revisionProviderId, String revisionTimestamp) throws MCSException { WebTarget webtarget = client .target(baseUrl) .path(REPRESENTATION_REVISIONS_RESOUR...
@Test(expected = ProviderDoesNotExistException.class) public void shouldFailWhenFetchingNonExistingProvider() throws ProviderDoesNotExistException { cassandraDataProviderService.getProvider("provident"); }
@Override public DataProvider getProvider(String providerId) throws ProviderDoesNotExistException { LOGGER.info("getProvider() providerId='{}'", providerId); DataProvider dp = dataProviderDao.getProvider(providerId); if (dp == null) { LOGGER.warn("ProviderDoesNotExistException providerId='{}''", providerId); throw new ...
CassandraDataProviderService implements DataProviderService { @Override public DataProvider getProvider(String providerId) throws ProviderDoesNotExistException { LOGGER.info("getProvider() providerId='{}'", providerId); DataProvider dp = dataProviderDao.getProvider(providerId); if (dp == null) { LOGGER.warn("ProviderDo...
CassandraDataProviderService implements DataProviderService { @Override public DataProvider getProvider(String providerId) throws ProviderDoesNotExistException { LOGGER.info("getProvider() providerId='{}'", providerId); DataProvider dp = dataProviderDao.getProvider(providerId); if (dp == null) { LOGGER.warn("ProviderDo...
CassandraDataProviderService implements DataProviderService { @Override public DataProvider getProvider(String providerId) throws ProviderDoesNotExistException { LOGGER.info("getProvider() providerId='{}'", providerId); DataProvider dp = dataProviderDao.getProvider(providerId); if (dp == null) { LOGGER.warn("ProviderDo...
CassandraDataProviderService implements DataProviderService { @Override public DataProvider getProvider(String providerId) throws ProviderDoesNotExistException { LOGGER.info("getProvider() providerId='{}'", providerId); DataProvider dp = dataProviderDao.getProvider(providerId); if (dp == null) { LOGGER.warn("ProviderDo...
@Test public void shouldReturnEmptyArrayWhenNoProviderAdded() { assertTrue("Expecting no providers", cassandraDataProviderService.getProviders(null, 1).getResults().isEmpty()); }
@Override public ResultSlice<DataProvider> getProviders(String thresholdProviderId, int limit) { LOGGER.info("getProviders() thresholdProviderId='{}', limit='{}'", thresholdProviderId, limit); String nextProvider = null; List<DataProvider> providers = dataProviderDao.getProviders(thresholdProviderId, limit + 1); final ...
CassandraDataProviderService implements DataProviderService { @Override public ResultSlice<DataProvider> getProviders(String thresholdProviderId, int limit) { LOGGER.info("getProviders() thresholdProviderId='{}', limit='{}'", thresholdProviderId, limit); String nextProvider = null; List<DataProvider> providers = dataPr...
CassandraDataProviderService implements DataProviderService { @Override public ResultSlice<DataProvider> getProviders(String thresholdProviderId, int limit) { LOGGER.info("getProviders() thresholdProviderId='{}', limit='{}'", thresholdProviderId, limit); String nextProvider = null; List<DataProvider> providers = dataPr...
CassandraDataProviderService implements DataProviderService { @Override public ResultSlice<DataProvider> getProviders(String thresholdProviderId, int limit) { LOGGER.info("getProviders() thresholdProviderId='{}', limit='{}'", thresholdProviderId, limit); String nextProvider = null; List<DataProvider> providers = dataPr...
CassandraDataProviderService implements DataProviderService { @Override public ResultSlice<DataProvider> getProviders(String thresholdProviderId, int limit) { LOGGER.info("getProviders() thresholdProviderId='{}', limit='{}'", thresholdProviderId, limit); String nextProvider = null; List<DataProvider> providers = dataPr...
@Test(expected = ProviderDoesNotExistException.class) public void shouldThrowExceptionWhenDeletingNonExistingProvider() throws ProviderDoesNotExistException { cassandraDataProviderService.deleteProvider("not existing provident"); }
@Override public void deleteProvider(String providerId) throws ProviderDoesNotExistException { LOGGER.info("Deleting provider {}", providerId); DataProvider dp = dataProviderDao.getProvider(providerId); if (dp == null) { LOGGER.warn("ProviderDoesNotExistException providerId='{}'", providerId); throw new ProviderDoesNot...
CassandraDataProviderService implements DataProviderService { @Override public void deleteProvider(String providerId) throws ProviderDoesNotExistException { LOGGER.info("Deleting provider {}", providerId); DataProvider dp = dataProviderDao.getProvider(providerId); if (dp == null) { LOGGER.warn("ProviderDoesNotExistExce...
CassandraDataProviderService implements DataProviderService { @Override public void deleteProvider(String providerId) throws ProviderDoesNotExistException { LOGGER.info("Deleting provider {}", providerId); DataProvider dp = dataProviderDao.getProvider(providerId); if (dp == null) { LOGGER.warn("ProviderDoesNotExistExce...
CassandraDataProviderService implements DataProviderService { @Override public void deleteProvider(String providerId) throws ProviderDoesNotExistException { LOGGER.info("Deleting provider {}", providerId); DataProvider dp = dataProviderDao.getProvider(providerId); if (dp == null) { LOGGER.warn("ProviderDoesNotExistExce...
CassandraDataProviderService implements DataProviderService { @Override public void deleteProvider(String providerId) throws ProviderDoesNotExistException { LOGGER.info("Deleting provider {}", providerId); DataProvider dp = dataProviderDao.getProvider(providerId); if (dp == null) { LOGGER.warn("ProviderDoesNotExistExce...
@Test(expected = RecordDoesNotExistException.class) public void testRecordDoesNotExist() throws Exception { service.getCloudId("test2", "test2"); }
@Override public CloudId getCloudId(String providerId, String recordId) throws DatabaseConnectionException, RecordDoesNotExistException { LOGGER.info("getCloudId() providerId='{}', recordId='{}'", providerId, recordId); List<CloudId> cloudIds = localIdDao.searchById(providerId, recordId); if (cloudIds.isEmpty()) { thro...
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public CloudId getCloudId(String providerId, String recordId) throws DatabaseConnectionException, RecordDoesNotExistException { LOGGER.info("getCloudId() providerId='{}', recordId='{}'", providerId, recordId); List<CloudId> cloudIds = local...
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public CloudId getCloudId(String providerId, String recordId) throws DatabaseConnectionException, RecordDoesNotExistException { LOGGER.info("getCloudId() providerId='{}', recordId='{}'", providerId, recordId); List<CloudId> cloudIds = local...
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public CloudId getCloudId(String providerId, String recordId) throws DatabaseConnectionException, RecordDoesNotExistException { LOGGER.info("getCloudId() providerId='{}', recordId='{}'", providerId, recordId); List<CloudId> cloudIds = local...
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public CloudId getCloudId(String providerId, String recordId) throws DatabaseConnectionException, RecordDoesNotExistException { LOGGER.info("getCloudId() providerId='{}', recordId='{}'", providerId, recordId); List<CloudId> cloudIds = local...
@Test(expected = CloudIdDoesNotExistException.class) public void testGetLocalIdsByCloudId() throws Exception { List<CloudId> gid = service.getLocalIdsByCloudId(IdGenerator .encodeWithSha256AndBase32("/test11/test11")); CloudId gId = service.createCloudId("test11", "test11"); gid = service.getLocalIdsByCloudId(gId.getId...
@Override public List<CloudId> getLocalIdsByCloudId(String cloudId) throws DatabaseConnectionException, CloudIdDoesNotExistException { LOGGER.info("getLocalIdsByCloudId() cloudId='{}'", cloudId); List<CloudId> cloudIds = cloudIdDao.searchById(cloudId); if (cloudIds.isEmpty()) { LOGGER.warn("CloudIdDoesNotExistException...
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public List<CloudId> getLocalIdsByCloudId(String cloudId) throws DatabaseConnectionException, CloudIdDoesNotExistException { LOGGER.info("getLocalIdsByCloudId() cloudId='{}'", cloudId); List<CloudId> cloudIds = cloudIdDao.searchById(cloudId...
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public List<CloudId> getLocalIdsByCloudId(String cloudId) throws DatabaseConnectionException, CloudIdDoesNotExistException { LOGGER.info("getLocalIdsByCloudId() cloudId='{}'", cloudId); List<CloudId> cloudIds = cloudIdDao.searchById(cloudId...
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public List<CloudId> getLocalIdsByCloudId(String cloudId) throws DatabaseConnectionException, CloudIdDoesNotExistException { LOGGER.info("getLocalIdsByCloudId() cloudId='{}'", cloudId); List<CloudId> cloudIds = cloudIdDao.searchById(cloudId...
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public List<CloudId> getLocalIdsByCloudId(String cloudId) throws DatabaseConnectionException, CloudIdDoesNotExistException { LOGGER.info("getLocalIdsByCloudId() cloudId='{}'", cloudId); List<CloudId> cloudIds = cloudIdDao.searchById(cloudId...
@Test public void testGetCloudIdsByProvider() throws Exception { String providerId = "providerId"; dataProviderDao.createDataProvider(providerId, new DataProviderProperties()); service.createCloudId(providerId, "test3"); service.createCloudId(providerId, "test2"); List<CloudId> cIds = service .getCloudIdsByProvider(pro...
@Override public List<CloudId> getCloudIdsByProvider(String providerId, String startRecordId, int limit) throws DatabaseConnectionException, ProviderDoesNotExistException { LOGGER.info("getCloudIdsByProvider() providerId='{}', startRecordId='{}', end='{}'", providerId, startRecordId, limit); if (dataProviderDao.getProv...
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public List<CloudId> getCloudIdsByProvider(String providerId, String startRecordId, int limit) throws DatabaseConnectionException, ProviderDoesNotExistException { LOGGER.info("getCloudIdsByProvider() providerId='{}', startRecordId='{}', end...
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public List<CloudId> getCloudIdsByProvider(String providerId, String startRecordId, int limit) throws DatabaseConnectionException, ProviderDoesNotExistException { LOGGER.info("getCloudIdsByProvider() providerId='{}', startRecordId='{}', end...
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public List<CloudId> getCloudIdsByProvider(String providerId, String startRecordId, int limit) throws DatabaseConnectionException, ProviderDoesNotExistException { LOGGER.info("getCloudIdsByProvider() providerId='{}', startRecordId='{}', end...
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public List<CloudId> getCloudIdsByProvider(String providerId, String startRecordId, int limit) throws DatabaseConnectionException, ProviderDoesNotExistException { LOGGER.info("getCloudIdsByProvider() providerId='{}', startRecordId='{}', end...
@Test(expected = IdHasBeenMappedException.class) public void testCreateIdMapping() throws Exception { dataProviderDao.createDataProvider("test12", new DataProviderProperties()); CloudId gid = service.createCloudId("test12", "test12"); service.createIdMapping(gid.getId(), "test12", "test13"); service.createIdMapping(gid...
@Override public CloudId createIdMapping(String cloudId, String providerId, String recordId) throws DatabaseConnectionException, CloudIdDoesNotExistException, IdHasBeenMappedException, ProviderDoesNotExistException, CloudIdAlreadyExistException { LOGGER.info("createIdMapping() creating mapping for cloudId='{}', provide...
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public CloudId createIdMapping(String cloudId, String providerId, String recordId) throws DatabaseConnectionException, CloudIdDoesNotExistException, IdHasBeenMappedException, ProviderDoesNotExistException, CloudIdAlreadyExistException { LOG...
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public CloudId createIdMapping(String cloudId, String providerId, String recordId) throws DatabaseConnectionException, CloudIdDoesNotExistException, IdHasBeenMappedException, ProviderDoesNotExistException, CloudIdAlreadyExistException { LOG...
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public CloudId createIdMapping(String cloudId, String providerId, String recordId) throws DatabaseConnectionException, CloudIdDoesNotExistException, IdHasBeenMappedException, ProviderDoesNotExistException, CloudIdAlreadyExistException { LOG...
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public CloudId createIdMapping(String cloudId, String providerId, String recordId) throws DatabaseConnectionException, CloudIdDoesNotExistException, IdHasBeenMappedException, ProviderDoesNotExistException, CloudIdAlreadyExistException { LOG...
@Test(expected = RecordDoesNotExistException.class) public void testRemoveIdMapping() throws Exception { dataProviderDao.createDataProvider("test16", new DataProviderProperties()); service.createCloudId("test16", "test16"); service.removeIdMapping("test16", "test16"); service.getCloudId("test16", "test16"); }
@Override public void removeIdMapping(String providerId, String recordId) throws DatabaseConnectionException, ProviderDoesNotExistException { LOGGER.info("removeIdMapping() removing Id mapping for providerId='{}', recordId='{}' ...", providerId, recordId); if (dataProviderDao.getProvider(providerId) == null) { LOGGER.w...
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public void removeIdMapping(String providerId, String recordId) throws DatabaseConnectionException, ProviderDoesNotExistException { LOGGER.info("removeIdMapping() removing Id mapping for providerId='{}', recordId='{}' ...", providerId, reco...
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public void removeIdMapping(String providerId, String recordId) throws DatabaseConnectionException, ProviderDoesNotExistException { LOGGER.info("removeIdMapping() removing Id mapping for providerId='{}', recordId='{}' ...", providerId, reco...
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public void removeIdMapping(String providerId, String recordId) throws DatabaseConnectionException, ProviderDoesNotExistException { LOGGER.info("removeIdMapping() removing Id mapping for providerId='{}', recordId='{}' ...", providerId, reco...
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public void removeIdMapping(String providerId, String recordId) throws DatabaseConnectionException, ProviderDoesNotExistException { LOGGER.info("removeIdMapping() removing Id mapping for providerId='{}', recordId='{}' ...", providerId, reco...
@Test(expected = RecordDoesNotExistException.class) public void testDeleteCloudId() throws Exception { dataProviderDao.createDataProvider("test21", new DataProviderProperties()); CloudId cId = service.createCloudId("test21", "test21"); service.deleteCloudId(cId.getId()); service.getCloudId(cId.getLocalId().getProviderI...
@Override public List<CloudId> deleteCloudId(String cloudId) throws DatabaseConnectionException, CloudIdDoesNotExistException { LOGGER.info("deleteCloudId() deleting cloudId='{}' ...", cloudId); if (cloudIdDao.searchById(cloudId).isEmpty()) { LOGGER.warn("CloudIdDoesNotExistException for cloudId='{}'", cloudId); throw ...
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public List<CloudId> deleteCloudId(String cloudId) throws DatabaseConnectionException, CloudIdDoesNotExistException { LOGGER.info("deleteCloudId() deleting cloudId='{}' ...", cloudId); if (cloudIdDao.searchById(cloudId).isEmpty()) { LOGGER....
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public List<CloudId> deleteCloudId(String cloudId) throws DatabaseConnectionException, CloudIdDoesNotExistException { LOGGER.info("deleteCloudId() deleting cloudId='{}' ...", cloudId); if (cloudIdDao.searchById(cloudId).isEmpty()) { LOGGER....
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public List<CloudId> deleteCloudId(String cloudId) throws DatabaseConnectionException, CloudIdDoesNotExistException { LOGGER.info("deleteCloudId() deleting cloudId='{}' ...", cloudId); if (cloudIdDao.searchById(cloudId).isEmpty()) { LOGGER....
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public List<CloudId> deleteCloudId(String cloudId) throws DatabaseConnectionException, CloudIdDoesNotExistException { LOGGER.info("deleteCloudId() deleting cloudId='{}' ...", cloudId); if (cloudIdDao.searchById(cloudId).isEmpty()) { LOGGER....
@Test(expected = IllegalArgumentException.class) public void testFindAclWithNullValues() { AclObjectIdentity newAoi = new AclObjectIdentity(); service.findAclObjectIdentity(newAoi); }
@Override public AclObjectIdentity findAclObjectIdentity(AclObjectIdentity objectId) { assertAclObjectIdentity(objectId); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN findAclObjectIdentity: objectIdentity: " + objectId); } Row row = session .execute(QueryBuilder.select().all().from(keyspace, AOI_TABLE) .where(QueryBuil...
CassandraAclRepository implements AclRepository { @Override public AclObjectIdentity findAclObjectIdentity(AclObjectIdentity objectId) { assertAclObjectIdentity(objectId); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN findAclObjectIdentity: objectIdentity: " + objectId); } Row row = session .execute(QueryBuilder.select(...
CassandraAclRepository implements AclRepository { @Override public AclObjectIdentity findAclObjectIdentity(AclObjectIdentity objectId) { assertAclObjectIdentity(objectId); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN findAclObjectIdentity: objectIdentity: " + objectId); } Row row = session .execute(QueryBuilder.select(...
CassandraAclRepository implements AclRepository { @Override public AclObjectIdentity findAclObjectIdentity(AclObjectIdentity objectId) { assertAclObjectIdentity(objectId); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN findAclObjectIdentity: objectIdentity: " + objectId); } Row row = session .execute(QueryBuilder.select(...
CassandraAclRepository implements AclRepository { @Override public AclObjectIdentity findAclObjectIdentity(AclObjectIdentity objectId) { assertAclObjectIdentity(objectId); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN findAclObjectIdentity: objectIdentity: " + objectId); } Row row = session .execute(QueryBuilder.select(...
// Deleting a cloud id that was never registered must raise
// CloudIdDoesNotExistException rather than silently succeeding.
@Test(expected = CloudIdDoesNotExistException.class)
public void testDeleteCloudIdException() throws Exception {
    service.deleteCloudId("test");
}
@Override public List<CloudId> deleteCloudId(String cloudId) throws DatabaseConnectionException, CloudIdDoesNotExistException { LOGGER.info("deleteCloudId() deleting cloudId='{}' ...", cloudId); if (cloudIdDao.searchById(cloudId).isEmpty()) { LOGGER.warn("CloudIdDoesNotExistException for cloudId='{}'", cloudId); throw ...
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public List<CloudId> deleteCloudId(String cloudId) throws DatabaseConnectionException, CloudIdDoesNotExistException { LOGGER.info("deleteCloudId() deleting cloudId='{}' ...", cloudId); if (cloudIdDao.searchById(cloudId).isEmpty()) { LOGGER....
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public List<CloudId> deleteCloudId(String cloudId) throws DatabaseConnectionException, CloudIdDoesNotExistException { LOGGER.info("deleteCloudId() deleting cloudId='{}' ...", cloudId); if (cloudIdDao.searchById(cloudId).isEmpty()) { LOGGER....
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public List<CloudId> deleteCloudId(String cloudId) throws DatabaseConnectionException, CloudIdDoesNotExistException { LOGGER.info("deleteCloudId() deleting cloudId='{}' ...", cloudId); if (cloudIdDao.searchById(cloudId).isEmpty()) { LOGGER....
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public List<CloudId> deleteCloudId(String cloudId) throws DatabaseConnectionException, CloudIdDoesNotExistException { LOGGER.info("deleteCloudId() deleting cloudId='{}' ...", cloudId); if (cloudIdDao.searchById(cloudId).isEmpty()) { LOGGER....
@Test @Ignore public void createCloudIdCollisonTest() throws DatabaseConnectionException, RecordExistsException, ProviderDoesNotExistException, RecordDatasetEmptyException, CloudIdDoesNotExistException, CloudIdAlreadyExistException { final Map<String, String> map = new HashMap<String, String>(); dataProviderDao.createD...
@Override public CloudId createCloudId(String... recordInfo) throws DatabaseConnectionException, RecordExistsException, ProviderDoesNotExistException, CloudIdAlreadyExistException { LOGGER.info("createCloudId() creating cloudId"); String providerId = recordInfo[0]; LOGGER.info("createCloudId() creating cloudId provider...
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public CloudId createCloudId(String... recordInfo) throws DatabaseConnectionException, RecordExistsException, ProviderDoesNotExistException, CloudIdAlreadyExistException { LOGGER.info("createCloudId() creating cloudId"); String providerId =...
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public CloudId createCloudId(String... recordInfo) throws DatabaseConnectionException, RecordExistsException, ProviderDoesNotExistException, CloudIdAlreadyExistException { LOGGER.info("createCloudId() creating cloudId"); String providerId =...
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public CloudId createCloudId(String... recordInfo) throws DatabaseConnectionException, RecordExistsException, ProviderDoesNotExistException, CloudIdAlreadyExistException { LOGGER.info("createCloudId() creating cloudId"); String providerId =...
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public CloudId createCloudId(String... recordInfo) throws DatabaseConnectionException, RecordExistsException, ProviderDoesNotExistException, CloudIdAlreadyExistException { LOGGER.info("createCloudId() creating cloudId"); String providerId =...
@Test(expected = CloudIdAlreadyExistException.class) public void insert_tryInsertTheSameContentTwice_ThrowsCloudIdAlreadyExistException() throws Exception { final String providerId = "providerId"; final String recordId = "recordId"; final String id = "id"; service.insert(true, id, providerId, recordId); service.insert(...
public List<CloudId> insert(boolean insertOnlyIfNoExist, String... args) throws DatabaseConnectionException, CloudIdAlreadyExistException { ResultSet rs = null; try { if (insertOnlyIfNoExist) { rs = dbService.getSession().execute(insertIfNoExistsStatement.bind(args[0], args[1], args[2])); Row row = rs.one(); if (!row.g...
CassandraCloudIdDAO { public List<CloudId> insert(boolean insertOnlyIfNoExist, String... args) throws DatabaseConnectionException, CloudIdAlreadyExistException { ResultSet rs = null; try { if (insertOnlyIfNoExist) { rs = dbService.getSession().execute(insertIfNoExistsStatement.bind(args[0], args[1], args[2])); Row row ...
CassandraCloudIdDAO { public List<CloudId> insert(boolean insertOnlyIfNoExist, String... args) throws DatabaseConnectionException, CloudIdAlreadyExistException { ResultSet rs = null; try { if (insertOnlyIfNoExist) { rs = dbService.getSession().execute(insertIfNoExistsStatement.bind(args[0], args[1], args[2])); Row row ...
CassandraCloudIdDAO { public List<CloudId> insert(boolean insertOnlyIfNoExist, String... args) throws DatabaseConnectionException, CloudIdAlreadyExistException { ResultSet rs = null; try { if (insertOnlyIfNoExist) { rs = dbService.getSession().execute(insertIfNoExistsStatement.bind(args[0], args[1], args[2])); Row row ...
CassandraCloudIdDAO { public List<CloudId> insert(boolean insertOnlyIfNoExist, String... args) throws DatabaseConnectionException, CloudIdAlreadyExistException { ResultSet rs = null; try { if (insertOnlyIfNoExist) { rs = dbService.getSession().execute(insertIfNoExistsStatement.bind(args[0], args[1], args[2])); Row row ...
@Test @Parameters({"uis/,uis","uis,uis","uis public void shouldGetUrlWithoutSlashAtTheEnd(String inputSuffix, String expectedSuffix) { StaticUrlProvider provider = new StaticUrlProvider(URL_PREFIX + inputSuffix); String result = provider.getBaseUrl(); assertThat(result,is(URL_PREFIX + expectedSuffix)); }
/**
 * Returns the base URL of the service.
 *
 * @return the stored base URL (presumably normalised — trailing slash
 *         stripped — by the constructor; confirm against StaticUrlProvider's
 *         constructor)
 */
public String getBaseUrl() { return baseUrl; }
StaticUrlProvider implements UrlProvider { public String getBaseUrl() { return baseUrl; } }
StaticUrlProvider implements UrlProvider { public String getBaseUrl() { return baseUrl; } StaticUrlProvider(final String serviceUrl); }
StaticUrlProvider implements UrlProvider { public String getBaseUrl() { return baseUrl; } StaticUrlProvider(final String serviceUrl); String getBaseUrl(); }
StaticUrlProvider implements UrlProvider { public String getBaseUrl() { return baseUrl; } StaticUrlProvider(final String serviceUrl); String getBaseUrl(); }
@Test
public void shouldSerializeTheDateSuccessfully() throws ParseException {
    // Parsing the canonical date string must yield exactly the instant
    // held by the test calendar fixture.
    final Date parsed = dateAdapter.unmarshal(DATE_STRING);
    assertEquals(cal.getTime(), parsed);
}
@Override public Date unmarshal(String stringDate) throws ParseException { if (stringDate == null || stringDate.isEmpty()) { return null; } try { Date date = GregorianCalendar.getInstance().getTime(); if(date == null){ throw new ParseException("Cannot parse the date. The accepted date format is "+FORMAT, 0); } return d...
DateAdapter extends XmlAdapter<String, Date> { @Override public Date unmarshal(String stringDate) throws ParseException { if (stringDate == null || stringDate.isEmpty()) { return null; } try { Date date = GregorianCalendar.getInstance().getTime(); if(date == null){ throw new ParseException("Cannot parse the date. The a...
DateAdapter extends XmlAdapter<String, Date> { @Override public Date unmarshal(String stringDate) throws ParseException { if (stringDate == null || stringDate.isEmpty()) { return null; } try { Date date = GregorianCalendar.getInstance().getTime(); if(date == null){ throw new ParseException("Cannot parse the date. The a...
DateAdapter extends XmlAdapter<String, Date> { @Override public Date unmarshal(String stringDate) throws ParseException { if (stringDate == null || stringDate.isEmpty()) { return null; } try { Date date = GregorianCalendar.getInstance().getTime(); if(date == null){ throw new ParseException("Cannot parse the date. The a...
DateAdapter extends XmlAdapter<String, Date> { @Override public Date unmarshal(String stringDate) throws ParseException { if (stringDate == null || stringDate.isEmpty()) { return null; } try { Date date = GregorianCalendar.getInstance().getTime(); if(date == null){ throw new ParseException("Cannot parse the date. The a...
// Marshalling the fixture date must produce the canonical date string.
@Test
public void shouldDeSerializeTheDateSuccessfully() {
    // Arguments swapped to JUnit's (expected, actual) convention — the
    // original had them reversed, which garbles the failure message and is
    // inconsistent with the sibling unmarshal test.
    assertEquals(DATE_STRING, dateAdapter.marshal(cal.getTime()));
}
/**
 * Formats the given date with the adapter's FORMATTER.
 *
 * @param date the date to serialise; must not be null
 * @return the formatted date string
 * @throws RuntimeException if {@code date} is null
 */
@Override
public String marshal(Date date) {
    // A missing creation date is treated as a programming error.
    if (date != null) {
        return FORMATTER.format(date);
    }
    throw new RuntimeException("The revision creation Date shouldn't be null");
}
DateAdapter extends XmlAdapter<String, Date> { @Override public String marshal(Date date) { if (date == null) { throw new RuntimeException("The revision creation Date shouldn't be null"); } return FORMATTER.format(date); } }
DateAdapter extends XmlAdapter<String, Date> { @Override public String marshal(Date date) { if (date == null) { throw new RuntimeException("The revision creation Date shouldn't be null"); } return FORMATTER.format(date); } }
DateAdapter extends XmlAdapter<String, Date> { @Override public String marshal(Date date) { if (date == null) { throw new RuntimeException("The revision creation Date shouldn't be null"); } return FORMATTER.format(date); } @Override String marshal(Date date); @Override Date unmarshal(String stringDate); }
DateAdapter extends XmlAdapter<String, Date> { @Override public String marshal(Date date) { if (date == null) { throw new RuntimeException("The revision creation Date shouldn't be null"); } return FORMATTER.format(date); } @Override String marshal(Date date); @Override Date unmarshal(String stringDate); }
@Test(expected = ParseException.class)
public void shouldThrowParsingException() {
    // A date-only string lacks the time component the adapter's format
    // requires, so unmarshalling must fail with ParseException.
    final String dateWithoutTime = "2017-11-23";
    dateAdapter.unmarshal(dateWithoutTime);
}
@Override public Date unmarshal(String stringDate) throws ParseException { if (stringDate == null || stringDate.isEmpty()) { return null; } try { Date date = GregorianCalendar.getInstance().getTime(); if(date == null){ throw new ParseException("Cannot parse the date. The accepted date format is "+FORMAT, 0); } return d...
DateAdapter extends XmlAdapter<String, Date> { @Override public Date unmarshal(String stringDate) throws ParseException { if (stringDate == null || stringDate.isEmpty()) { return null; } try { Date date = GregorianCalendar.getInstance().getTime(); if(date == null){ throw new ParseException("Cannot parse the date. The a...
DateAdapter extends XmlAdapter<String, Date> { @Override public Date unmarshal(String stringDate) throws ParseException { if (stringDate == null || stringDate.isEmpty()) { return null; } try { Date date = GregorianCalendar.getInstance().getTime(); if(date == null){ throw new ParseException("Cannot parse the date. The a...
DateAdapter extends XmlAdapter<String, Date> { @Override public Date unmarshal(String stringDate) throws ParseException { if (stringDate == null || stringDate.isEmpty()) { return null; } try { Date date = GregorianCalendar.getInstance().getTime(); if(date == null){ throw new ParseException("Cannot parse the date. The a...
DateAdapter extends XmlAdapter<String, Date> { @Override public Date unmarshal(String stringDate) throws ParseException { if (stringDate == null || stringDate.isEmpty()) { return null; } try { Date date = GregorianCalendar.getInstance().getTime(); if(date == null){ throw new ParseException("Cannot parse the date. The a...
// Absent input — null or the empty string — maps to a null Date rather
// than raising a parse error.
@Test
public void shouldCreateNullDateInCaseEmptyOrNull() throws ParseException {
    assertNull(dateAdapter.unmarshal(null));
    assertNull(dateAdapter.unmarshal(""));
}
@Override public Date unmarshal(String stringDate) throws ParseException { if (stringDate == null || stringDate.isEmpty()) { return null; } try { Date date = GregorianCalendar.getInstance().getTime(); if(date == null){ throw new ParseException("Cannot parse the date. The accepted date format is "+FORMAT, 0); } return d...
DateAdapter extends XmlAdapter<String, Date> { @Override public Date unmarshal(String stringDate) throws ParseException { if (stringDate == null || stringDate.isEmpty()) { return null; } try { Date date = GregorianCalendar.getInstance().getTime(); if(date == null){ throw new ParseException("Cannot parse the date. The a...
DateAdapter extends XmlAdapter<String, Date> { @Override public Date unmarshal(String stringDate) throws ParseException { if (stringDate == null || stringDate.isEmpty()) { return null; } try { Date date = GregorianCalendar.getInstance().getTime(); if(date == null){ throw new ParseException("Cannot parse the date. The a...
DateAdapter extends XmlAdapter<String, Date> { @Override public Date unmarshal(String stringDate) throws ParseException { if (stringDate == null || stringDate.isEmpty()) { return null; } try { Date date = GregorianCalendar.getInstance().getTime(); if(date == null){ throw new ParseException("Cannot parse the date. The a...
DateAdapter extends XmlAdapter<String, Date> { @Override public Date unmarshal(String stringDate) throws ParseException { if (stringDate == null || stringDate.isEmpty()) { return null; } try { Date date = GregorianCalendar.getInstance().getTime(); if(date == null){ throw new ParseException("Cannot parse the date. The a...
// Marshalling a null date is a programming error and must raise
// RuntimeException, not return a value.
@Test(expected = RuntimeException.class)
public void shouldThrowRunTimeException() {
    dateAdapter.marshal(null);
}
/**
 * Serialises {@code date} using the adapter's FORMATTER.
 *
 * @param date the revision creation date; must not be null
 * @return the formatted date string
 * @throws RuntimeException if {@code date} is null
 */
@Override
public String marshal(Date date) {
    if (date == null) {
        throw new RuntimeException("The revision creation Date shouldn't be null");
    }
    return FORMATTER.format(date);
}
DateAdapter extends XmlAdapter<String, Date> { @Override public String marshal(Date date) { if (date == null) { throw new RuntimeException("The revision creation Date shouldn't be null"); } return FORMATTER.format(date); } }
DateAdapter extends XmlAdapter<String, Date> { @Override public String marshal(Date date) { if (date == null) { throw new RuntimeException("The revision creation Date shouldn't be null"); } return FORMATTER.format(date); } }
DateAdapter extends XmlAdapter<String, Date> { @Override public String marshal(Date date) { if (date == null) { throw new RuntimeException("The revision creation Date shouldn't be null"); } return FORMATTER.format(date); } @Override String marshal(Date date); @Override Date unmarshal(String stringDate); }
DateAdapter extends XmlAdapter<String, Date> { @Override public String marshal(Date date) { if (date == null) { throw new RuntimeException("The revision creation Date shouldn't be null"); } return FORMATTER.format(date); } @Override String marshal(Date date); @Override Date unmarshal(String stringDate); }
@Test
public void testFindAclChildrenForNotExistingAcl() {
    // Fully populated identity that points at no stored ACL: the child
    // lookup must come back empty rather than failing.
    final AclObjectIdentity unknownIdentity = new AclObjectIdentity();
    unknownIdentity.setId("invalid");
    unknownIdentity.setObjectClass(aoi_class);
    unknownIdentity.setOwnerId(sid1);
    final List<AclObjectIdentity> children =
            service.findAclObjectIdentityChildren(unknownIdentity);
    assertTrue(children.isEmpty());
}
@Override public List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId) { assertAclObjectIdentity(objectId); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN findAclObjectIdentityChildren: objectIdentity: " + objectId); } ResultSet resultSet = session.execute(QueryBuilder.select().all().from(keys...
CassandraAclRepository implements AclRepository { @Override public List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId) { assertAclObjectIdentity(objectId); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN findAclObjectIdentityChildren: objectIdentity: " + objectId); } ResultSet resultSet = ses...
CassandraAclRepository implements AclRepository { @Override public List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId) { assertAclObjectIdentity(objectId); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN findAclObjectIdentityChildren: objectIdentity: " + objectId); } ResultSet resultSet = ses...
CassandraAclRepository implements AclRepository { @Override public List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId) { assertAclObjectIdentity(objectId); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN findAclObjectIdentityChildren: objectIdentity: " + objectId); } ResultSet resultSet = ses...
CassandraAclRepository implements AclRepository { @Override public List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId) { assertAclObjectIdentity(objectId); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN findAclObjectIdentityChildren: objectIdentity: " + objectId); } ResultSet resultSet = ses...
// A null identity must be rejected outright by argument validation.
@Test(expected = IllegalArgumentException.class)
public void testFindNullAclChildren() {
    service.findAclObjectIdentityChildren(null);
}
@Override public List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId) { assertAclObjectIdentity(objectId); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN findAclObjectIdentityChildren: objectIdentity: " + objectId); } ResultSet resultSet = session.execute(QueryBuilder.select().all().from(keys...
CassandraAclRepository implements AclRepository { @Override public List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId) { assertAclObjectIdentity(objectId); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN findAclObjectIdentityChildren: objectIdentity: " + objectId); } ResultSet resultSet = ses...
CassandraAclRepository implements AclRepository { @Override public List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId) { assertAclObjectIdentity(objectId); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN findAclObjectIdentityChildren: objectIdentity: " + objectId); } ResultSet resultSet = ses...
CassandraAclRepository implements AclRepository { @Override public List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId) { assertAclObjectIdentity(objectId); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN findAclObjectIdentityChildren: objectIdentity: " + objectId); } ResultSet resultSet = ses...
CassandraAclRepository implements AclRepository { @Override public List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId) { assertAclObjectIdentity(objectId); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN findAclObjectIdentityChildren: objectIdentity: " + objectId); } ResultSet resultSet = ses...
// An identity whose mandatory fields are all unset must fail validation.
@Test(expected = IllegalArgumentException.class)
public void testFindAclChildrenWithNullValues() {
    AclObjectIdentity newAoi = new AclObjectIdentity();
    service.findAclObjectIdentityChildren(newAoi);
}
@Override public List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId) { assertAclObjectIdentity(objectId); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN findAclObjectIdentityChildren: objectIdentity: " + objectId); } ResultSet resultSet = session.execute(QueryBuilder.select().all().from(keys...
CassandraAclRepository implements AclRepository { @Override public List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId) { assertAclObjectIdentity(objectId); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN findAclObjectIdentityChildren: objectIdentity: " + objectId); } ResultSet resultSet = ses...
CassandraAclRepository implements AclRepository { @Override public List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId) { assertAclObjectIdentity(objectId); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN findAclObjectIdentityChildren: objectIdentity: " + objectId); } ResultSet resultSet = ses...
CassandraAclRepository implements AclRepository { @Override public List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId) { assertAclObjectIdentity(objectId); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN findAclObjectIdentityChildren: objectIdentity: " + objectId); } ResultSet resultSet = ses...
CassandraAclRepository implements AclRepository { @Override public List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId) { assertAclObjectIdentity(objectId); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN findAclObjectIdentityChildren: objectIdentity: " + objectId); } ResultSet resultSet = ses...
// The remover must attempt the DAO call 6 times (1 initial + 5 retries)
// before letting the exception escape.
@Test(expected = Exception.class)
public void shouldRetry5TimesBeforeFailing() {
    doThrow(Exception.class).when(subTaskInfoDAO).removeNotifications(eq(TASK_ID));
    try {
        removerImpl.removeNotifications(TASK_ID);
    } finally {
        // BUG FIX: in the original, this verify came after the throwing call
        // and was therefore unreachable — the expected exception aborted the
        // test first, so the retry count was never actually checked. The
        // finally block guarantees the verification runs while the expected
        // exception still propagates to the @Test(expected=...) handler.
        verify(subTaskInfoDAO, times(6)).removeNotifications(eq(TASK_ID));
    }
}
@Override public void removeNotifications(long taskId) { int retries = DEFAULT_RETRIES; while (true) { try { subTaskInfoDAO.removeNotifications(taskId); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while removing the logs. Retries left: " + retries); waitForTheNextCall(); } else { LOGGER.error...
RemoverImpl implements Remover { @Override public void removeNotifications(long taskId) { int retries = DEFAULT_RETRIES; while (true) { try { subTaskInfoDAO.removeNotifications(taskId); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while removing the logs. Retries left: " + retries); waitForThe...
RemoverImpl implements Remover { @Override public void removeNotifications(long taskId) { int retries = DEFAULT_RETRIES; while (true) { try { subTaskInfoDAO.removeNotifications(taskId); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while removing the logs. Retries left: " + retries); waitForThe...
RemoverImpl implements Remover { @Override public void removeNotifications(long taskId) { int retries = DEFAULT_RETRIES; while (true) { try { subTaskInfoDAO.removeNotifications(taskId); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while removing the logs. Retries left: " + retries); waitForThe...
RemoverImpl implements Remover { @Override public void removeNotifications(long taskId) { int retries = DEFAULT_RETRIES; while (true) { try { subTaskInfoDAO.removeNotifications(taskId); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while removing the logs. Retries left: " + retries); waitForThe...
// Both a null identity and null entries: validation of the identity must
// fire before anything else is touched.
@Test(expected = IllegalArgumentException.class)
public void testUpdateNullAcl() {
    service.updateAcl(null, null);
}
@Override public void updateAcl(AclObjectIdentity aoi, List<AclEntry> entries) throws AclNotFoundException { assertAclObjectIdentity(aoi); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN updateAcl: aclObjectIdentity: " + aoi + ", entries: " + entries); } AclObjectIdentity persistedAoi = findAclObjectIdentity(aoi); if (per...
CassandraAclRepository implements AclRepository { @Override public void updateAcl(AclObjectIdentity aoi, List<AclEntry> entries) throws AclNotFoundException { assertAclObjectIdentity(aoi); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN updateAcl: aclObjectIdentity: " + aoi + ", entries: " + entries); } AclObjectIdentity ...
CassandraAclRepository implements AclRepository { @Override public void updateAcl(AclObjectIdentity aoi, List<AclEntry> entries) throws AclNotFoundException { assertAclObjectIdentity(aoi); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN updateAcl: aclObjectIdentity: " + aoi + ", entries: " + entries); } AclObjectIdentity ...
CassandraAclRepository implements AclRepository { @Override public void updateAcl(AclObjectIdentity aoi, List<AclEntry> entries) throws AclNotFoundException { assertAclObjectIdentity(aoi); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN updateAcl: aclObjectIdentity: " + aoi + ", entries: " + entries); } AclObjectIdentity ...
CassandraAclRepository implements AclRepository { @Override public void updateAcl(AclObjectIdentity aoi, List<AclEntry> entries) throws AclNotFoundException { assertAclObjectIdentity(aoi); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN updateAcl: aclObjectIdentity: " + aoi + ", entries: " + entries); } AclObjectIdentity ...
@Test(expected = AclNotFoundException.class)
public void testUpdateAclNotExisting() {
    // Updating an ACL that was never persisted must surface AclNotFoundException.
    final AclObjectIdentity missingIdentity = new AclObjectIdentity();
    missingIdentity.setId("invalid");
    missingIdentity.setObjectClass(aoi_class);
    missingIdentity.setOwnerId(sid1);
    service.updateAcl(missingIdentity, new ArrayList<AclEntry>());
}
@Override public void updateAcl(AclObjectIdentity aoi, List<AclEntry> entries) throws AclNotFoundException { assertAclObjectIdentity(aoi); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN updateAcl: aclObjectIdentity: " + aoi + ", entries: " + entries); } AclObjectIdentity persistedAoi = findAclObjectIdentity(aoi); if (per...
CassandraAclRepository implements AclRepository { @Override public void updateAcl(AclObjectIdentity aoi, List<AclEntry> entries) throws AclNotFoundException { assertAclObjectIdentity(aoi); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN updateAcl: aclObjectIdentity: " + aoi + ", entries: " + entries); } AclObjectIdentity ...
CassandraAclRepository implements AclRepository { @Override public void updateAcl(AclObjectIdentity aoi, List<AclEntry> entries) throws AclNotFoundException { assertAclObjectIdentity(aoi); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN updateAcl: aclObjectIdentity: " + aoi + ", entries: " + entries); } AclObjectIdentity ...
CassandraAclRepository implements AclRepository { @Override public void updateAcl(AclObjectIdentity aoi, List<AclEntry> entries) throws AclNotFoundException { assertAclObjectIdentity(aoi); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN updateAcl: aclObjectIdentity: " + aoi + ", entries: " + entries); } AclObjectIdentity ...
CassandraAclRepository implements AclRepository { @Override public void updateAcl(AclObjectIdentity aoi, List<AclEntry> entries) throws AclNotFoundException { assertAclObjectIdentity(aoi); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN updateAcl: aclObjectIdentity: " + aoi + ", entries: " + entries); } AclObjectIdentity ...
// Saving a null identity must be rejected by argument validation.
@Test(expected = IllegalArgumentException.class)
public void testSaveNullAcl() {
    service.saveAcl(null);
}
@Override public void saveAcl(AclObjectIdentity aoi) throws AclAlreadyExistsException { assertAclObjectIdentity(aoi); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN saveAcl: aclObjectIdentity: " + aoi); } if (findAclObjectIdentity(aoi) != null) { throw new AclAlreadyExistsException("Object identity '" + aoi + "' already ...
CassandraAclRepository implements AclRepository { @Override public void saveAcl(AclObjectIdentity aoi) throws AclAlreadyExistsException { assertAclObjectIdentity(aoi); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN saveAcl: aclObjectIdentity: " + aoi); } if (findAclObjectIdentity(aoi) != null) { throw new AclAlreadyExist...
CassandraAclRepository implements AclRepository { @Override public void saveAcl(AclObjectIdentity aoi) throws AclAlreadyExistsException { assertAclObjectIdentity(aoi); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN saveAcl: aclObjectIdentity: " + aoi); } if (findAclObjectIdentity(aoi) != null) { throw new AclAlreadyExist...
CassandraAclRepository implements AclRepository { @Override public void saveAcl(AclObjectIdentity aoi) throws AclAlreadyExistsException { assertAclObjectIdentity(aoi); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN saveAcl: aclObjectIdentity: " + aoi); } if (findAclObjectIdentity(aoi) != null) { throw new AclAlreadyExist...
CassandraAclRepository implements AclRepository { @Override public void saveAcl(AclObjectIdentity aoi) throws AclAlreadyExistsException { assertAclObjectIdentity(aoi); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN saveAcl: aclObjectIdentity: " + aoi); } if (findAclObjectIdentity(aoi) != null) { throw new AclAlreadyExist...
@Test(expected = AclAlreadyExistsException.class)
public void testSaveAclAlreadyExisting() {
    final AclObjectIdentity identity = createDefaultTestAOI();
    // First save succeeds; saving the identical identity a second time
    // must be rejected as a duplicate.
    service.saveAcl(identity);
    service.saveAcl(identity);
}
@Override public void saveAcl(AclObjectIdentity aoi) throws AclAlreadyExistsException { assertAclObjectIdentity(aoi); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN saveAcl: aclObjectIdentity: " + aoi); } if (findAclObjectIdentity(aoi) != null) { throw new AclAlreadyExistsException("Object identity '" + aoi + "' already ...
CassandraAclRepository implements AclRepository { @Override public void saveAcl(AclObjectIdentity aoi) throws AclAlreadyExistsException { assertAclObjectIdentity(aoi); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN saveAcl: aclObjectIdentity: " + aoi); } if (findAclObjectIdentity(aoi) != null) { throw new AclAlreadyExist...
CassandraAclRepository implements AclRepository { @Override public void saveAcl(AclObjectIdentity aoi) throws AclAlreadyExistsException { assertAclObjectIdentity(aoi); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN saveAcl: aclObjectIdentity: " + aoi); } if (findAclObjectIdentity(aoi) != null) { throw new AclAlreadyExist...
CassandraAclRepository implements AclRepository { @Override public void saveAcl(AclObjectIdentity aoi) throws AclAlreadyExistsException { assertAclObjectIdentity(aoi); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN saveAcl: aclObjectIdentity: " + aoi); } if (findAclObjectIdentity(aoi) != null) { throw new AclAlreadyExist...
CassandraAclRepository implements AclRepository { @Override public void saveAcl(AclObjectIdentity aoi) throws AclAlreadyExistsException { assertAclObjectIdentity(aoi); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN saveAcl: aclObjectIdentity: " + aoi); } if (findAclObjectIdentity(aoi) != null) { throw new AclAlreadyExist...
// A null delete list must be rejected by list validation.
@Test(expected = IllegalArgumentException.class)
public void testDeleteNullAcl() {
    service.deleteAcls(null);
}
@Override public void deleteAcls(List<AclObjectIdentity> objectIdsToDelete) { assertAclObjectIdentityList(objectIdsToDelete); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN deleteAcls: objectIdsToDelete: " + objectIdsToDelete); } List<String> ids = new ArrayList<>(objectIdsToDelete.size()); for (AclObjectIdentity entry :...
CassandraAclRepository implements AclRepository { @Override public void deleteAcls(List<AclObjectIdentity> objectIdsToDelete) { assertAclObjectIdentityList(objectIdsToDelete); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN deleteAcls: objectIdsToDelete: " + objectIdsToDelete); } List<String> ids = new ArrayList<>(objectI...
CassandraAclRepository implements AclRepository { @Override public void deleteAcls(List<AclObjectIdentity> objectIdsToDelete) { assertAclObjectIdentityList(objectIdsToDelete); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN deleteAcls: objectIdsToDelete: " + objectIdsToDelete); } List<String> ids = new ArrayList<>(objectI...
CassandraAclRepository implements AclRepository { @Override public void deleteAcls(List<AclObjectIdentity> objectIdsToDelete) { assertAclObjectIdentityList(objectIdsToDelete); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN deleteAcls: objectIdsToDelete: " + objectIdsToDelete); } List<String> ids = new ArrayList<>(objectI...
CassandraAclRepository implements AclRepository { @Override public void deleteAcls(List<AclObjectIdentity> objectIdsToDelete) { assertAclObjectIdentityList(objectIdsToDelete); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN deleteAcls: objectIdsToDelete: " + objectIdsToDelete); } List<String> ids = new ArrayList<>(objectI...
// Deleting an identity that was never stored must be a silent no-op
// (no exception expected).
@Test
public void testDeleteAclNotExisting() {
    AclObjectIdentity newAoi = new AclObjectIdentity();
    newAoi.setId("invalid");
    newAoi.setObjectClass(aoi_class);
    newAoi.setOwnerId(sid1);
    // Idiom: Arrays.asList over an explicit one-element array was needless
    // ceremony — the varargs form builds the same single-element list.
    service.deleteAcls(Arrays.asList(newAoi));
}
@Override public void deleteAcls(List<AclObjectIdentity> objectIdsToDelete) { assertAclObjectIdentityList(objectIdsToDelete); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN deleteAcls: objectIdsToDelete: " + objectIdsToDelete); } List<String> ids = new ArrayList<>(objectIdsToDelete.size()); for (AclObjectIdentity entry :...
CassandraAclRepository implements AclRepository { @Override public void deleteAcls(List<AclObjectIdentity> objectIdsToDelete) { assertAclObjectIdentityList(objectIdsToDelete); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN deleteAcls: objectIdsToDelete: " + objectIdsToDelete); } List<String> ids = new ArrayList<>(objectI...
CassandraAclRepository implements AclRepository { @Override public void deleteAcls(List<AclObjectIdentity> objectIdsToDelete) { assertAclObjectIdentityList(objectIdsToDelete); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN deleteAcls: objectIdsToDelete: " + objectIdsToDelete); } List<String> ids = new ArrayList<>(objectI...
CassandraAclRepository implements AclRepository { @Override public void deleteAcls(List<AclObjectIdentity> objectIdsToDelete) { assertAclObjectIdentityList(objectIdsToDelete); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN deleteAcls: objectIdsToDelete: " + objectIdsToDelete); } List<String> ids = new ArrayList<>(objectI...
CassandraAclRepository implements AclRepository { @Override public void deleteAcls(List<AclObjectIdentity> objectIdsToDelete) { assertAclObjectIdentityList(objectIdsToDelete); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN deleteAcls: objectIdsToDelete: " + objectIdsToDelete); } List<String> ids = new ArrayList<>(objectI...
@Test(expected = IllegalArgumentException.class) public void testDeleteEmptyAclList() { service.deleteAcls(new ArrayList<AclObjectIdentity>()); }
@Override public void deleteAcls(List<AclObjectIdentity> objectIdsToDelete) { assertAclObjectIdentityList(objectIdsToDelete); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN deleteAcls: objectIdsToDelete: " + objectIdsToDelete); } List<String> ids = new ArrayList<>(objectIdsToDelete.size()); for (AclObjectIdentity entry :...
CassandraAclRepository implements AclRepository { @Override public void deleteAcls(List<AclObjectIdentity> objectIdsToDelete) { assertAclObjectIdentityList(objectIdsToDelete); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN deleteAcls: objectIdsToDelete: " + objectIdsToDelete); } List<String> ids = new ArrayList<>(objectI...
CassandraAclRepository implements AclRepository { @Override public void deleteAcls(List<AclObjectIdentity> objectIdsToDelete) { assertAclObjectIdentityList(objectIdsToDelete); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN deleteAcls: objectIdsToDelete: " + objectIdsToDelete); } List<String> ids = new ArrayList<>(objectI...
CassandraAclRepository implements AclRepository { @Override public void deleteAcls(List<AclObjectIdentity> objectIdsToDelete) { assertAclObjectIdentityList(objectIdsToDelete); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN deleteAcls: objectIdsToDelete: " + objectIdsToDelete); } List<String> ids = new ArrayList<>(objectI...
CassandraAclRepository implements AclRepository { @Override public void deleteAcls(List<AclObjectIdentity> objectIdsToDelete) { assertAclObjectIdentityList(objectIdsToDelete); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN deleteAcls: objectIdsToDelete: " + objectIdsToDelete); } List<String> ids = new ArrayList<>(objectI...
@Test(expected = IllegalArgumentException.class) public void testSaveAclWithNullValues() { AclObjectIdentity newAoi = new AclObjectIdentity(); service.saveAcl(newAoi); }
@Override public void saveAcl(AclObjectIdentity aoi) throws AclAlreadyExistsException { assertAclObjectIdentity(aoi); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN saveAcl: aclObjectIdentity: " + aoi); } if (findAclObjectIdentity(aoi) != null) { throw new AclAlreadyExistsException("Object identity '" + aoi + "' already ...
CassandraAclRepository implements AclRepository { @Override public void saveAcl(AclObjectIdentity aoi) throws AclAlreadyExistsException { assertAclObjectIdentity(aoi); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN saveAcl: aclObjectIdentity: " + aoi); } if (findAclObjectIdentity(aoi) != null) { throw new AclAlreadyExist...
CassandraAclRepository implements AclRepository { @Override public void saveAcl(AclObjectIdentity aoi) throws AclAlreadyExistsException { assertAclObjectIdentity(aoi); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN saveAcl: aclObjectIdentity: " + aoi); } if (findAclObjectIdentity(aoi) != null) { throw new AclAlreadyExist...
CassandraAclRepository implements AclRepository { @Override public void saveAcl(AclObjectIdentity aoi) throws AclAlreadyExistsException { assertAclObjectIdentity(aoi); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN saveAcl: aclObjectIdentity: " + aoi); } if (findAclObjectIdentity(aoi) != null) { throw new AclAlreadyExist...
CassandraAclRepository implements AclRepository { @Override public void saveAcl(AclObjectIdentity aoi) throws AclAlreadyExistsException { assertAclObjectIdentity(aoi); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN saveAcl: aclObjectIdentity: " + aoi); } if (findAclObjectIdentity(aoi) != null) { throw new AclAlreadyExist...
@Test(expected = IllegalArgumentException.class) public void testDeleteAclWithNullValues() { AclObjectIdentity newAoi = new AclObjectIdentity(); service.deleteAcls(Arrays.asList(new AclObjectIdentity[] { newAoi })); }
@Override public void deleteAcls(List<AclObjectIdentity> objectIdsToDelete) { assertAclObjectIdentityList(objectIdsToDelete); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN deleteAcls: objectIdsToDelete: " + objectIdsToDelete); } List<String> ids = new ArrayList<>(objectIdsToDelete.size()); for (AclObjectIdentity entry :...
CassandraAclRepository implements AclRepository { @Override public void deleteAcls(List<AclObjectIdentity> objectIdsToDelete) { assertAclObjectIdentityList(objectIdsToDelete); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN deleteAcls: objectIdsToDelete: " + objectIdsToDelete); } List<String> ids = new ArrayList<>(objectI...
CassandraAclRepository implements AclRepository { @Override public void deleteAcls(List<AclObjectIdentity> objectIdsToDelete) { assertAclObjectIdentityList(objectIdsToDelete); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN deleteAcls: objectIdsToDelete: " + objectIdsToDelete); } List<String> ids = new ArrayList<>(objectI...
CassandraAclRepository implements AclRepository { @Override public void deleteAcls(List<AclObjectIdentity> objectIdsToDelete) { assertAclObjectIdentityList(objectIdsToDelete); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN deleteAcls: objectIdsToDelete: " + objectIdsToDelete); } List<String> ids = new ArrayList<>(objectI...
CassandraAclRepository implements AclRepository { @Override public void deleteAcls(List<AclObjectIdentity> objectIdsToDelete) { assertAclObjectIdentityList(objectIdsToDelete); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN deleteAcls: objectIdsToDelete: " + objectIdsToDelete); } List<String> ids = new ArrayList<>(objectI...
@Test public void currentBucketShouldBeNull() { Bucket bucket = bucketsHandler.getCurrentBucket(BUCKETS_TABLE_NAME, "sampleObject"); Assert.assertNull(bucket); }
public Bucket getCurrentBucket(String bucketsTableName, String objectId) { String query = "SELECT object_id, bucket_id, rows_count FROM " + bucketsTableName + " WHERE object_id = '" + objectId + "';"; ResultSet rs = session.execute(query); List<Row> rows = rs.all(); Row row = rows.isEmpty() ? null : rows.get(rows.size(...
BucketsHandler { public Bucket getCurrentBucket(String bucketsTableName, String objectId) { String query = "SELECT object_id, bucket_id, rows_count FROM " + bucketsTableName + " WHERE object_id = '" + objectId + "';"; ResultSet rs = session.execute(query); List<Row> rows = rs.all(); Row row = rows.isEmpty() ? null : ro...
BucketsHandler { public Bucket getCurrentBucket(String bucketsTableName, String objectId) { String query = "SELECT object_id, bucket_id, rows_count FROM " + bucketsTableName + " WHERE object_id = '" + objectId + "';"; ResultSet rs = session.execute(query); List<Row> rows = rs.all(); Row row = rows.isEmpty() ? null : ro...
BucketsHandler { public Bucket getCurrentBucket(String bucketsTableName, String objectId) { String query = "SELECT object_id, bucket_id, rows_count FROM " + bucketsTableName + " WHERE object_id = '" + objectId + "';"; ResultSet rs = session.execute(query); List<Row> rows = rs.all(); Row row = rows.isEmpty() ? null : ro...
BucketsHandler { public Bucket getCurrentBucket(String bucketsTableName, String objectId) { String query = "SELECT object_id, bucket_id, rows_count FROM " + bucketsTableName + " WHERE object_id = '" + objectId + "';"; ResultSet rs = session.execute(query); List<Row> rows = rs.all(); Row row = rows.isEmpty() ? null : ro...
@Test public void shouldSuccessfullyRemoveErrors() { doNothing().when(taskErrorDAO).removeErrors(eq(TASK_ID)); removerImpl.removeErrorReports(TASK_ID); verify(taskErrorDAO, times(1)).removeErrors((eq(TASK_ID))); }
@Override public void removeErrorReports(long taskId) { int retries = DEFAULT_RETRIES; while (true) { try { taskErrorDAO.removeErrors(taskId); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while removing the error reports. Retries left: " + retries); waitForTheNextCall(); } else { LOGGER.error(...
RemoverImpl implements Remover { @Override public void removeErrorReports(long taskId) { int retries = DEFAULT_RETRIES; while (true) { try { taskErrorDAO.removeErrors(taskId); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while removing the error reports. Retries left: " + retries); waitForTheN...
RemoverImpl implements Remover { @Override public void removeErrorReports(long taskId) { int retries = DEFAULT_RETRIES; while (true) { try { taskErrorDAO.removeErrors(taskId); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while removing the error reports. Retries left: " + retries); waitForTheN...
RemoverImpl implements Remover { @Override public void removeErrorReports(long taskId) { int retries = DEFAULT_RETRIES; while (true) { try { taskErrorDAO.removeErrors(taskId); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while removing the error reports. Retries left: " + retries); waitForTheN...
RemoverImpl implements Remover { @Override public void removeErrorReports(long taskId) { int retries = DEFAULT_RETRIES; while (true) { try { taskErrorDAO.removeErrors(taskId); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while removing the error reports. Retries left: " + retries); waitForTheN...
@Test public void shouldCreateNewBucket() { Bucket bucket = new Bucket("sampleObjectId", new com.eaio.uuid.UUID().toString(), 0); bucketsHandler.increaseBucketCount(BUCKETS_TABLE_NAME, bucket); assertResults(bucket, 1); }
public void increaseBucketCount(String bucketsTableName, Bucket bucket) { String query = "UPDATE " + bucketsTableName + " SET rows_count = rows_count + 1 WHERE object_id = '" + bucket.getObjectId() + "' AND bucket_id = " + UUID.fromString(bucket.getBucketId()) + ";"; session.execute(query); }
BucketsHandler { public void increaseBucketCount(String bucketsTableName, Bucket bucket) { String query = "UPDATE " + bucketsTableName + " SET rows_count = rows_count + 1 WHERE object_id = '" + bucket.getObjectId() + "' AND bucket_id = " + UUID.fromString(bucket.getBucketId()) + ";"; session.execute(query); } }
BucketsHandler { public void increaseBucketCount(String bucketsTableName, Bucket bucket) { String query = "UPDATE " + bucketsTableName + " SET rows_count = rows_count + 1 WHERE object_id = '" + bucket.getObjectId() + "' AND bucket_id = " + UUID.fromString(bucket.getBucketId()) + ";"; session.execute(query); } BucketsHa...
BucketsHandler { public void increaseBucketCount(String bucketsTableName, Bucket bucket) { String query = "UPDATE " + bucketsTableName + " SET rows_count = rows_count + 1 WHERE object_id = '" + bucket.getObjectId() + "' AND bucket_id = " + UUID.fromString(bucket.getBucketId()) + ";"; session.execute(query); } BucketsHa...
BucketsHandler { public void increaseBucketCount(String bucketsTableName, Bucket bucket) { String query = "UPDATE " + bucketsTableName + " SET rows_count = rows_count + 1 WHERE object_id = '" + bucket.getObjectId() + "' AND bucket_id = " + UUID.fromString(bucket.getBucketId()) + ";"; session.execute(query); } BucketsHa...
@Test public void shouldUpdateCounterForExistingBucket() { Bucket bucket = new Bucket("sampleObjectId", new com.eaio.uuid.UUID().toString(), 0); bucketsHandler.increaseBucketCount(BUCKETS_TABLE_NAME, bucket); bucketsHandler.increaseBucketCount(BUCKETS_TABLE_NAME, bucket); assertResults(bucket, 2); }
public void increaseBucketCount(String bucketsTableName, Bucket bucket) { String query = "UPDATE " + bucketsTableName + " SET rows_count = rows_count + 1 WHERE object_id = '" + bucket.getObjectId() + "' AND bucket_id = " + UUID.fromString(bucket.getBucketId()) + ";"; session.execute(query); }
BucketsHandler { public void increaseBucketCount(String bucketsTableName, Bucket bucket) { String query = "UPDATE " + bucketsTableName + " SET rows_count = rows_count + 1 WHERE object_id = '" + bucket.getObjectId() + "' AND bucket_id = " + UUID.fromString(bucket.getBucketId()) + ";"; session.execute(query); } }
BucketsHandler { public void increaseBucketCount(String bucketsTableName, Bucket bucket) { String query = "UPDATE " + bucketsTableName + " SET rows_count = rows_count + 1 WHERE object_id = '" + bucket.getObjectId() + "' AND bucket_id = " + UUID.fromString(bucket.getBucketId()) + ";"; session.execute(query); } BucketsHa...
BucketsHandler { public void increaseBucketCount(String bucketsTableName, Bucket bucket) { String query = "UPDATE " + bucketsTableName + " SET rows_count = rows_count + 1 WHERE object_id = '" + bucket.getObjectId() + "' AND bucket_id = " + UUID.fromString(bucket.getBucketId()) + ";"; session.execute(query); } BucketsHa...
BucketsHandler { public void increaseBucketCount(String bucketsTableName, Bucket bucket) { String query = "UPDATE " + bucketsTableName + " SET rows_count = rows_count + 1 WHERE object_id = '" + bucket.getObjectId() + "' AND bucket_id = " + UUID.fromString(bucket.getBucketId()) + ";"; session.execute(query); } BucketsHa...
@Test public void testCountStatisticsSuccessfully() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); byte[] fileData = Files.readAllBytes(Paths.get("src/test/resources/example1.xml")); StormTaskTuple tuple = new StormTaskTuple(TASK_ID, TASK_NAME, SOURCE_VERSION_URL, fileData, new HashMap<String, String>(),...
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { if (!statsAlreadyCalculated(stormTaskTuple)) { LOGGER.info("Calculating file statistics for {}", stormTaskTuple); countStatistics(stormTaskTuple); markRecordStatsAsCalculated(stormTaskTuple); } else { LOGGER.info("File stats will NO...
StatisticsBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { if (!statsAlreadyCalculated(stormTaskTuple)) { LOGGER.info("Calculating file statistics for {}", stormTaskTuple); countStatistics(stormTaskTuple); markRecordStatsAsCalculated(stormTaskTuple);...
StatisticsBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { if (!statsAlreadyCalculated(stormTaskTuple)) { LOGGER.info("Calculating file statistics for {}", stormTaskTuple); countStatistics(stormTaskTuple); markRecordStatsAsCalculated(stormTaskTuple);...
StatisticsBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { if (!statsAlreadyCalculated(stormTaskTuple)) { LOGGER.info("Calculating file statistics for {}", stormTaskTuple); countStatistics(stormTaskTuple); markRecordStatsAsCalculated(stormTaskTuple);...
StatisticsBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { if (!statsAlreadyCalculated(stormTaskTuple)) { LOGGER.info("Calculating file statistics for {}", stormTaskTuple); countStatistics(stormTaskTuple); markRecordStatsAsCalculated(stormTaskTuple);...
@Test public void testAggregatedCountStatisticsSuccessfully() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); Tuple anchorTuple2 = mock(TupleImpl.class); byte[] fileData = Files.readAllBytes(Paths.get("src/test/resources/example1.xml")); StormTaskTuple tuple = new StormTaskTuple(TASK_ID, TASK_NAME, SOURCE...
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { if (!statsAlreadyCalculated(stormTaskTuple)) { LOGGER.info("Calculating file statistics for {}", stormTaskTuple); countStatistics(stormTaskTuple); markRecordStatsAsCalculated(stormTaskTuple); } else { LOGGER.info("File stats will NO...
StatisticsBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { if (!statsAlreadyCalculated(stormTaskTuple)) { LOGGER.info("Calculating file statistics for {}", stormTaskTuple); countStatistics(stormTaskTuple); markRecordStatsAsCalculated(stormTaskTuple);...
StatisticsBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { if (!statsAlreadyCalculated(stormTaskTuple)) { LOGGER.info("Calculating file statistics for {}", stormTaskTuple); countStatistics(stormTaskTuple); markRecordStatsAsCalculated(stormTaskTuple);...
StatisticsBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { if (!statsAlreadyCalculated(stormTaskTuple)) { LOGGER.info("Calculating file statistics for {}", stormTaskTuple); countStatistics(stormTaskTuple); markRecordStatsAsCalculated(stormTaskTuple);...
StatisticsBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { if (!statsAlreadyCalculated(stormTaskTuple)) { LOGGER.info("Calculating file statistics for {}", stormTaskTuple); countStatistics(stormTaskTuple); markRecordStatsAsCalculated(stormTaskTuple);...
@Test public void testCountStatisticsFailed() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); byte[] fileData = Files.readAllBytes(Paths.get("src/test/resources/example1.xml")); fileData[0] = 'X'; StormTaskTuple tuple = new StormTaskTuple(TASK_ID, TASK_NAME, SOURCE_VERSION_URL, fileData, new HashMap<Strin...
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { if (!statsAlreadyCalculated(stormTaskTuple)) { LOGGER.info("Calculating file statistics for {}", stormTaskTuple); countStatistics(stormTaskTuple); markRecordStatsAsCalculated(stormTaskTuple); } else { LOGGER.info("File stats will NO...
StatisticsBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { if (!statsAlreadyCalculated(stormTaskTuple)) { LOGGER.info("Calculating file statistics for {}", stormTaskTuple); countStatistics(stormTaskTuple); markRecordStatsAsCalculated(stormTaskTuple);...
StatisticsBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { if (!statsAlreadyCalculated(stormTaskTuple)) { LOGGER.info("Calculating file statistics for {}", stormTaskTuple); countStatistics(stormTaskTuple); markRecordStatsAsCalculated(stormTaskTuple);...
StatisticsBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { if (!statsAlreadyCalculated(stormTaskTuple)) { LOGGER.info("Calculating file statistics for {}", stormTaskTuple); countStatistics(stormTaskTuple); markRecordStatsAsCalculated(stormTaskTuple);...
StatisticsBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { if (!statsAlreadyCalculated(stormTaskTuple)) { LOGGER.info("Calculating file statistics for {}", stormTaskTuple); countStatistics(stormTaskTuple); markRecordStatsAsCalculated(stormTaskTuple);...
@Test public void validateEdmInternalFile() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); byte[] FILE_DATA = Files.readAllBytes(Paths.get("src/test/resources/Item_35834473_test.xml")); StormTaskTuple tuple = new StormTaskTuple(TASK_ID, TASK_NAME, SOURCE_VERSION_URL, FILE_DATA, prepareStormTaskTupleParam...
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { reorderFileContent(stormTaskTuple); validateFileAndEmit(anchorTuple, stormTaskTuple); } catch (Exception e) { LOGGER.error("Validation Bolt error: {}", e.getMessage()); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), ...
ValidationBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { reorderFileContent(stormTaskTuple); validateFileAndEmit(anchorTuple, stormTaskTuple); } catch (Exception e) { LOGGER.error("Validation Bolt error: {}", e.getMessage()); emitErrorNotification(...
ValidationBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { reorderFileContent(stormTaskTuple); validateFileAndEmit(anchorTuple, stormTaskTuple); } catch (Exception e) { LOGGER.error("Validation Bolt error: {}", e.getMessage()); emitErrorNotification(...
ValidationBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { reorderFileContent(stormTaskTuple); validateFileAndEmit(anchorTuple, stormTaskTuple); } catch (Exception e) { LOGGER.error("Validation Bolt error: {}", e.getMessage()); emitErrorNotification(...
ValidationBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { reorderFileContent(stormTaskTuple); validateFileAndEmit(anchorTuple, stormTaskTuple); } catch (Exception e) { LOGGER.error("Validation Bolt error: {}", e.getMessage()); emitErrorNotification(...
@Test public void validateEdmInternalFileWithProvidedRootLocation() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); byte[] FILE_DATA = Files.readAllBytes(Paths.get("src/test/resources/Item_35834473_test.xml")); StormTaskTuple tuple = new StormTaskTuple(TASK_ID, TASK_NAME, SOURCE_VERSION_URL, FILE_DATA, pr...
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { reorderFileContent(stormTaskTuple); validateFileAndEmit(anchorTuple, stormTaskTuple); } catch (Exception e) { LOGGER.error("Validation Bolt error: {}", e.getMessage()); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), ...
ValidationBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { reorderFileContent(stormTaskTuple); validateFileAndEmit(anchorTuple, stormTaskTuple); } catch (Exception e) { LOGGER.error("Validation Bolt error: {}", e.getMessage()); emitErrorNotification(...
ValidationBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { reorderFileContent(stormTaskTuple); validateFileAndEmit(anchorTuple, stormTaskTuple); } catch (Exception e) { LOGGER.error("Validation Bolt error: {}", e.getMessage()); emitErrorNotification(...
ValidationBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { reorderFileContent(stormTaskTuple); validateFileAndEmit(anchorTuple, stormTaskTuple); } catch (Exception e) { LOGGER.error("Validation Bolt error: {}", e.getMessage()); emitErrorNotification(...
ValidationBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { reorderFileContent(stormTaskTuple); validateFileAndEmit(anchorTuple, stormTaskTuple); } catch (Exception e) { LOGGER.error("Validation Bolt error: {}", e.getMessage()); emitErrorNotification(...
@Test public void validateEdmExternalFile() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); byte[] FILE_DATA = Files.readAllBytes(Paths.get("src/test/resources/Item_35834473.xml")); StormTaskTuple tuple = new StormTaskTuple(TASK_ID, TASK_NAME, SOURCE_VERSION_URL, FILE_DATA, prepareStormTaskTupleParameters...
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { reorderFileContent(stormTaskTuple); validateFileAndEmit(anchorTuple, stormTaskTuple); } catch (Exception e) { LOGGER.error("Validation Bolt error: {}", e.getMessage()); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), ...
ValidationBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { reorderFileContent(stormTaskTuple); validateFileAndEmit(anchorTuple, stormTaskTuple); } catch (Exception e) { LOGGER.error("Validation Bolt error: {}", e.getMessage()); emitErrorNotification(...
ValidationBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { reorderFileContent(stormTaskTuple); validateFileAndEmit(anchorTuple, stormTaskTuple); } catch (Exception e) { LOGGER.error("Validation Bolt error: {}", e.getMessage()); emitErrorNotification(...
ValidationBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { reorderFileContent(stormTaskTuple); validateFileAndEmit(anchorTuple, stormTaskTuple); } catch (Exception e) { LOGGER.error("Validation Bolt error: {}", e.getMessage()); emitErrorNotification(...
ValidationBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { reorderFileContent(stormTaskTuple); validateFileAndEmit(anchorTuple, stormTaskTuple); } catch (Exception e) { LOGGER.error("Validation Bolt error: {}", e.getMessage()); emitErrorNotification(...
@Test public void validateEdmExternalOutOfOrderFile() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); byte[] FILE_DATA = Files.readAllBytes(Paths.get("src/test/resources/edmExternalWithOutOfOrderElements.xml")); StormTaskTuple tuple = new StormTaskTuple(TASK_ID, TASK_NAME, SOURCE_VERSION_URL, FILE_DATA, p...
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { reorderFileContent(stormTaskTuple); validateFileAndEmit(anchorTuple, stormTaskTuple); } catch (Exception e) { LOGGER.error("Validation Bolt error: {}", e.getMessage()); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), ...
ValidationBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { reorderFileContent(stormTaskTuple); validateFileAndEmit(anchorTuple, stormTaskTuple); } catch (Exception e) { LOGGER.error("Validation Bolt error: {}", e.getMessage()); emitErrorNotification(...
ValidationBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { reorderFileContent(stormTaskTuple); validateFileAndEmit(anchorTuple, stormTaskTuple); } catch (Exception e) { LOGGER.error("Validation Bolt error: {}", e.getMessage()); emitErrorNotification(...
ValidationBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { reorderFileContent(stormTaskTuple); validateFileAndEmit(anchorTuple, stormTaskTuple); } catch (Exception e) { LOGGER.error("Validation Bolt error: {}", e.getMessage()); emitErrorNotification(...
ValidationBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { reorderFileContent(stormTaskTuple); validateFileAndEmit(anchorTuple, stormTaskTuple); } catch (Exception e) { LOGGER.error("Validation Bolt error: {}", e.getMessage()); emitErrorNotification(...
@Test public void sendErrorNotificationWhenTheValidationFails() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); byte[] FILE_DATA = Files.readAllBytes(Paths.get("src/test/resources/Item_35834473_test.xml")); StormTaskTuple tuple = new StormTaskTuple(TASK_ID, TASK_NAME, SOURCE_VERSION_URL, FILE_DATA, prepar...
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { reorderFileContent(stormTaskTuple); validateFileAndEmit(anchorTuple, stormTaskTuple); } catch (Exception e) { LOGGER.error("Validation Bolt error: {}", e.getMessage()); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), ...
ValidationBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { reorderFileContent(stormTaskTuple); validateFileAndEmit(anchorTuple, stormTaskTuple); } catch (Exception e) { LOGGER.error("Validation Bolt error: {}", e.getMessage()); emitErrorNotification(...
ValidationBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { reorderFileContent(stormTaskTuple); validateFileAndEmit(anchorTuple, stormTaskTuple); } catch (Exception e) { LOGGER.error("Validation Bolt error: {}", e.getMessage()); emitErrorNotification(...
ValidationBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { reorderFileContent(stormTaskTuple); validateFileAndEmit(anchorTuple, stormTaskTuple); } catch (Exception e) { LOGGER.error("Validation Bolt error: {}", e.getMessage()); emitErrorNotification(...
ValidationBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { reorderFileContent(stormTaskTuple); validateFileAndEmit(anchorTuple, stormTaskTuple); } catch (Exception e) { LOGGER.error("Validation Bolt error: {}", e.getMessage()); emitErrorNotification(...
@Test(expected = Exception.class) public void shouldRetry5TimesBeforeFailingWhileRemovingErrorReports() { doThrow(Exception.class).when(taskErrorDAO).removeErrors(eq(TASK_ID)); removerImpl.removeErrorReports(TASK_ID); verify(taskErrorDAO, times(6)).removeErrors((eq(TASK_ID))); }
@Override public void removeErrorReports(long taskId) { int retries = DEFAULT_RETRIES; while (true) { try { taskErrorDAO.removeErrors(taskId); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while removing the error reports. Retries left: " + retries); waitForTheNextCall(); } else { LOGGER.error(...
RemoverImpl implements Remover { @Override public void removeErrorReports(long taskId) { int retries = DEFAULT_RETRIES; while (true) { try { taskErrorDAO.removeErrors(taskId); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while removing the error reports. Retries left: " + retries); waitForTheN...
RemoverImpl implements Remover { @Override public void removeErrorReports(long taskId) { int retries = DEFAULT_RETRIES; while (true) { try { taskErrorDAO.removeErrors(taskId); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while removing the error reports. Retries left: " + retries); waitForTheN...
RemoverImpl implements Remover { @Override public void removeErrorReports(long taskId) { int retries = DEFAULT_RETRIES; while (true) { try { taskErrorDAO.removeErrors(taskId); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while removing the error reports. Retries left: " + retries); waitForTheN...
RemoverImpl implements Remover { @Override public void removeErrorReports(long taskId) { int retries = DEFAULT_RETRIES; while (true) { try { taskErrorDAO.removeErrors(taskId); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while removing the error reports. Retries left: " + retries); waitForTheN...
@Test public void nodeContentsSizeShouldBeSmallerThanMaximumSize() throws Exception { String fileContent = readFile("src/test/resources/BigContent.xml"); RecordStatisticsGenerator xmlParser = new RecordStatisticsGenerator(fileContent); List<NodeStatistics> nodeModelList = xmlParser.getStatistics(); for (NodeStatistics ...
public List<NodeStatistics> getStatistics() throws SAXException, IOException, ParserConfigurationException { Document doc = getParsedDocument(); doc.getDocumentElement().normalize(); Node root = doc.getDocumentElement(); addRootToNodeList(root); prepareNodeStatistics(root); return new ArrayList<>(nodeStatistics.values(...
RecordStatisticsGenerator { public List<NodeStatistics> getStatistics() throws SAXException, IOException, ParserConfigurationException { Document doc = getParsedDocument(); doc.getDocumentElement().normalize(); Node root = doc.getDocumentElement(); addRootToNodeList(root); prepareNodeStatistics(root); return new ArrayL...
RecordStatisticsGenerator { public List<NodeStatistics> getStatistics() throws SAXException, IOException, ParserConfigurationException { Document doc = getParsedDocument(); doc.getDocumentElement().normalize(); Node root = doc.getDocumentElement(); addRootToNodeList(root); prepareNodeStatistics(root); return new ArrayL...
RecordStatisticsGenerator { public List<NodeStatistics> getStatistics() throws SAXException, IOException, ParserConfigurationException { Document doc = getParsedDocument(); doc.getDocumentElement().normalize(); Node root = doc.getDocumentElement(); addRootToNodeList(root); prepareNodeStatistics(root); return new ArrayL...
RecordStatisticsGenerator { public List<NodeStatistics> getStatistics() throws SAXException, IOException, ParserConfigurationException { Document doc = getParsedDocument(); doc.getDocumentElement().normalize(); Node root = doc.getDocumentElement(); addRootToNodeList(root); prepareNodeStatistics(root); return new ArrayL...
@Test public void shouldEnrichTheFileSuccessfullyAndSendItToTheNextBolt() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); try (InputStream stream = this.getClass().getResourceAsStream("/files/Item_35834473.xml")) { when(fileClient.getFile(eq(FILE_URL), eq(AUTHORIZATION), eq(AUTHORIZATION))).thenReturn(str...
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { LOGGER.warn(NO_RESOURCES_DETAILED_MESSAGE); try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { EnrichedRdf enrichedRdf = deserializer...
EDMEnrichmentBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { LOGGER.warn(NO_RESOURCES_DETAILED_MESSAGE); try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple))...
EDMEnrichmentBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { LOGGER.warn(NO_RESOURCES_DETAILED_MESSAGE); try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple))...
EDMEnrichmentBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { LOGGER.warn(NO_RESOURCES_DETAILED_MESSAGE); try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple))...
EDMEnrichmentBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { LOGGER.warn(NO_RESOURCES_DETAILED_MESSAGE); try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple))...
@Test public void shouldEnrichTheFileSuccessfullyOnMultipleBatchesAndSendItToTheNextBolt() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); try (InputStream stream = this.getClass().getResourceAsStream("/files/Item_35834473.xml")) { when(fileClient.getFile(eq(FILE_URL), eq(AUTHORIZATION), eq(AUTHORIZATION)...
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { LOGGER.warn(NO_RESOURCES_DETAILED_MESSAGE); try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { EnrichedRdf enrichedRdf = deserializer...
EDMEnrichmentBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { LOGGER.warn(NO_RESOURCES_DETAILED_MESSAGE); try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple))...
EDMEnrichmentBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { LOGGER.warn(NO_RESOURCES_DETAILED_MESSAGE); try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple))...
EDMEnrichmentBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { LOGGER.warn(NO_RESOURCES_DETAILED_MESSAGE); try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple))...
EDMEnrichmentBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { LOGGER.warn(NO_RESOURCES_DETAILED_MESSAGE); try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple))...
@Test public void shouldForwardTheTupleWhenNoResourceLinkFound() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); edmEnrichmentBolt.execute(anchorTuple, stormTaskTuple); int expectedParametersSize = 2; Map<String, String> initialTupleParameters = stormTaskTuple.getParameters(); assertEquals(expectedParamet...
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { LOGGER.warn(NO_RESOURCES_DETAILED_MESSAGE); try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { EnrichedRdf enrichedRdf = deserializer...
EDMEnrichmentBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { LOGGER.warn(NO_RESOURCES_DETAILED_MESSAGE); try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple))...
EDMEnrichmentBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { LOGGER.warn(NO_RESOURCES_DETAILED_MESSAGE); try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple))...
EDMEnrichmentBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { LOGGER.warn(NO_RESOURCES_DETAILED_MESSAGE); try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple))...
EDMEnrichmentBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { LOGGER.warn(NO_RESOURCES_DETAILED_MESSAGE); try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple))...
@Test public void shouldLogTheExceptionAndSendItAsParameterToTheNextBolt() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); try (InputStream stream = this.getClass().getResourceAsStream("/files/Item_35834473.xml")) { when(fileClient.getFile(eq(FILE_URL), eq(AUTHORIZATION), eq(AUTHORIZATION))).thenReturn(st...
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { LOGGER.warn(NO_RESOURCES_DETAILED_MESSAGE); try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { EnrichedRdf enrichedRdf = deserializer...
EDMEnrichmentBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { LOGGER.warn(NO_RESOURCES_DETAILED_MESSAGE); try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple))...
EDMEnrichmentBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { LOGGER.warn(NO_RESOURCES_DETAILED_MESSAGE); try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple))...
EDMEnrichmentBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { LOGGER.warn(NO_RESOURCES_DETAILED_MESSAGE); try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple))...
EDMEnrichmentBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { LOGGER.warn(NO_RESOURCES_DETAILED_MESSAGE); try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple))...
@Test public void shouldSuccessfullyProcessTheResource() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); stormTaskTuple.addParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT, Integer.toString(5)); stormTaskTuple.addParameter(PluginParameterKeys.RESOURCE_LINK_KEY, "{\"resourceUrl\":\"http: String resourceN...
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { LOGGER.info("Starting resource processing"); long processingStartTime = new Date().getTime(); StringBuilder exception = new StringBuilder(); if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { outputCollec...
ResourceProcessingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { LOGGER.info("Starting resource processing"); long processingStartTime = new Date().getTime(); StringBuilder exception = new StringBuilder(); if (stormTaskTuple.getParameter(PluginParameterK...
ResourceProcessingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { LOGGER.info("Starting resource processing"); long processingStartTime = new Date().getTime(); StringBuilder exception = new StringBuilder(); if (stormTaskTuple.getParameter(PluginParameterK...
ResourceProcessingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { LOGGER.info("Starting resource processing"); long processingStartTime = new Date().getTime(); StringBuilder exception = new StringBuilder(); if (stormTaskTuple.getParameter(PluginParameterK...
ResourceProcessingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { LOGGER.info("Starting resource processing"); long processingStartTime = new Date().getTime(); StringBuilder exception = new StringBuilder(); if (stormTaskTuple.getParameter(PluginParameterK...
@Test public void shouldDropTheTaskAndStopProcessing() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); stormTaskTuple.addParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT, Integer.toString(5)); stormTaskTuple.addParameter(PluginParameterKeys.RESOURCE_LINK_KEY, "{\"resourceUrl\":\"http: String resourceNam...
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { LOGGER.info("Starting resource processing"); long processingStartTime = new Date().getTime(); StringBuilder exception = new StringBuilder(); if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { outputCollec...
ResourceProcessingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { LOGGER.info("Starting resource processing"); long processingStartTime = new Date().getTime(); StringBuilder exception = new StringBuilder(); if (stormTaskTuple.getParameter(PluginParameterK...
ResourceProcessingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { LOGGER.info("Starting resource processing"); long processingStartTime = new Date().getTime(); StringBuilder exception = new StringBuilder(); if (stormTaskTuple.getParameter(PluginParameterK...
ResourceProcessingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { LOGGER.info("Starting resource processing"); long processingStartTime = new Date().getTime(); StringBuilder exception = new StringBuilder(); if (stormTaskTuple.getParameter(PluginParameterK...
ResourceProcessingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { LOGGER.info("Starting resource processing"); long processingStartTime = new Date().getTime(); StringBuilder exception = new StringBuilder(); if (stormTaskTuple.getParameter(PluginParameterK...
@Test public void shouldFormulateTheAggregateExceptionsWhenSavingToAmazonFails() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); stormTaskTuple.addParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT, Integer.toString(5)); stormTaskTuple.addParameter(PluginParameterKeys.RESOURCE_LINK_KEY, "{\"resourceUrl\":...
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { LOGGER.info("Starting resource processing"); long processingStartTime = new Date().getTime(); StringBuilder exception = new StringBuilder(); if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { outputCollec...
ResourceProcessingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { LOGGER.info("Starting resource processing"); long processingStartTime = new Date().getTime(); StringBuilder exception = new StringBuilder(); if (stormTaskTuple.getParameter(PluginParameterK...
ResourceProcessingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { LOGGER.info("Starting resource processing"); long processingStartTime = new Date().getTime(); StringBuilder exception = new StringBuilder(); if (stormTaskTuple.getParameter(PluginParameterK...
ResourceProcessingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { LOGGER.info("Starting resource processing"); long processingStartTime = new Date().getTime(); StringBuilder exception = new StringBuilder(); if (stormTaskTuple.getParameter(PluginParameterK...
ResourceProcessingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { LOGGER.info("Starting resource processing"); long processingStartTime = new Date().getTime(); StringBuilder exception = new StringBuilder(); if (stormTaskTuple.getParameter(PluginParameterK...
@Test public void shouldSendExceptionsWhenProcessingFails() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); stormTaskTuple.addParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT, Integer.toString(5)); stormTaskTuple.addParameter(PluginParameterKeys.RESOURCE_LINK_KEY, "{\"resourceUrl\":\"http: doThrow(Media...
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { LOGGER.info("Starting resource processing"); long processingStartTime = new Date().getTime(); StringBuilder exception = new StringBuilder(); if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { outputCollec...
ResourceProcessingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { LOGGER.info("Starting resource processing"); long processingStartTime = new Date().getTime(); StringBuilder exception = new StringBuilder(); if (stormTaskTuple.getParameter(PluginParameterK...
ResourceProcessingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { LOGGER.info("Starting resource processing"); long processingStartTime = new Date().getTime(); StringBuilder exception = new StringBuilder(); if (stormTaskTuple.getParameter(PluginParameterK...
ResourceProcessingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { LOGGER.info("Starting resource processing"); long processingStartTime = new Date().getTime(); StringBuilder exception = new StringBuilder(); if (stormTaskTuple.getParameter(PluginParameterK...
ResourceProcessingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { LOGGER.info("Starting resource processing"); long processingStartTime = new Date().getTime(); StringBuilder exception = new StringBuilder(); if (stormTaskTuple.getParameter(PluginParameterK...
@Test public void shouldForwardTheTupleWhenNoResourceLinkFound() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); resourceProcessingBolt.execute(anchorTuple, stormTaskTuple); int expectedParametersSize = 2; assertEquals(expectedParametersSize, stormTaskTuple.getParameters().size()); verify(outputCollector,...
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { LOGGER.info("Starting resource processing"); long processingStartTime = new Date().getTime(); StringBuilder exception = new StringBuilder(); if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { outputCollec...
ResourceProcessingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { LOGGER.info("Starting resource processing"); long processingStartTime = new Date().getTime(); StringBuilder exception = new StringBuilder(); if (stormTaskTuple.getParameter(PluginParameterK...
ResourceProcessingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { LOGGER.info("Starting resource processing"); long processingStartTime = new Date().getTime(); StringBuilder exception = new StringBuilder(); if (stormTaskTuple.getParameter(PluginParameterK...
ResourceProcessingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { LOGGER.info("Starting resource processing"); long processingStartTime = new Date().getTime(); StringBuilder exception = new StringBuilder(); if (stormTaskTuple.getParameter(PluginParameterK...
ResourceProcessingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { LOGGER.info("Starting resource processing"); long processingStartTime = new Date().getTime(); StringBuilder exception = new StringBuilder(); if (stormTaskTuple.getParameter(PluginParameterK...
@Test public void shouldSuccessfullyRemoveStatistics() { doNothing().when(cassandraNodeStatisticsDAO).removeStatistics(eq(TASK_ID)); removerImpl.removeStatistics(TASK_ID); verify(cassandraNodeStatisticsDAO, times(1)).removeStatistics((eq(TASK_ID))); }
@Override public void removeStatistics(long taskId) { int retries = DEFAULT_RETRIES; while (true) { try { cassandraNodeStatisticsDAO.removeStatistics(taskId); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while removing the validation statistics. Retries left: " + retries); waitForTheNextCall()...
RemoverImpl implements Remover { @Override public void removeStatistics(long taskId) { int retries = DEFAULT_RETRIES; while (true) { try { cassandraNodeStatisticsDAO.removeStatistics(taskId); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while removing the validation statistics. Retries left: "...
RemoverImpl implements Remover { @Override public void removeStatistics(long taskId) { int retries = DEFAULT_RETRIES; while (true) { try { cassandraNodeStatisticsDAO.removeStatistics(taskId); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while removing the validation statistics. Retries left: "...
RemoverImpl implements Remover { @Override public void removeStatistics(long taskId) { int retries = DEFAULT_RETRIES; while (true) { try { cassandraNodeStatisticsDAO.removeStatistics(taskId); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while removing the validation statistics. Retries left: "...
RemoverImpl implements Remover { @Override public void removeStatistics(long taskId) { int retries = DEFAULT_RETRIES; while (true) { try { cassandraNodeStatisticsDAO.removeStatistics(taskId); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while removing the validation statistics. Retries left: "...
@Test public void deactivateShouldClearTheTaskQueue() throws Exception { final int taskCount = 10; for (int i = 0; i < taskCount; i++) { httpKafkaSpout.taskDownloader.taskQueue.put(new DpsTask()); } assertTrue(!httpKafkaSpout.taskDownloader.taskQueue.isEmpty()); httpKafkaSpout.deactivate(); assertTrue(httpKafkaSpout.ta...
@Override public void deactivate() { LOGGER.info("Deactivate method was executed"); deactivateWaitingTasks(); deactivateCurrentTask(); LOGGER.info("Deactivate method was finished"); }
HttpKafkaSpout extends CustomKafkaSpout { @Override public void deactivate() { LOGGER.info("Deactivate method was executed"); deactivateWaitingTasks(); deactivateCurrentTask(); LOGGER.info("Deactivate method was finished"); } }
HttpKafkaSpout extends CustomKafkaSpout { @Override public void deactivate() { LOGGER.info("Deactivate method was executed"); deactivateWaitingTasks(); deactivateCurrentTask(); LOGGER.info("Deactivate method was finished"); } HttpKafkaSpout(KafkaSpoutConfig spoutConf); HttpKafkaSpout(KafkaSpoutConfig spoutConf, String...
HttpKafkaSpout extends CustomKafkaSpout { @Override public void deactivate() { LOGGER.info("Deactivate method was executed"); deactivateWaitingTasks(); deactivateCurrentTask(); LOGGER.info("Deactivate method was finished"); } HttpKafkaSpout(KafkaSpoutConfig spoutConf); HttpKafkaSpout(KafkaSpoutConfig spoutConf, String...
HttpKafkaSpout extends CustomKafkaSpout { @Override public void deactivate() { LOGGER.info("Deactivate method was executed"); deactivateWaitingTasks(); deactivateCurrentTask(); LOGGER.info("Deactivate method was finished"); } HttpKafkaSpout(KafkaSpoutConfig spoutConf); HttpKafkaSpout(KafkaSpoutConfig spoutConf, String...
@Test public void shouldUnpackTheZipFilesRecursively() throws CompressionExtensionNotRecognizedException, IOException { zipUnpackingService.unpackFile(DESTINATION_DIR + FILE_NAME + ZIP_EXTENSION, DESTINATION_DIR); Collection files = getXMLFiles(DESTINATION_DIR + DEFAULT_DESTINATION_NAME); assertNotNull(files); assertEq...
public void unpackFile(final String compressedFilePath, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { final List<String> zipFiles = new ArrayList<>(); ZipUtil.unpack(new File(compressedFilePath), new File(destinationFolder), new NameMapper() { public String map(String ...
ZipUnpackingService implements FileUnpackingService { public void unpackFile(final String compressedFilePath, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { final List<String> zipFiles = new ArrayList<>(); ZipUtil.unpack(new File(compressedFilePath), new File(destinatio...
ZipUnpackingService implements FileUnpackingService { public void unpackFile(final String compressedFilePath, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { final List<String> zipFiles = new ArrayList<>(); ZipUtil.unpack(new File(compressedFilePath), new File(destinatio...
ZipUnpackingService implements FileUnpackingService { public void unpackFile(final String compressedFilePath, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { final List<String> zipFiles = new ArrayList<>(); ZipUtil.unpack(new File(compressedFilePath), new File(destinatio...
ZipUnpackingService implements FileUnpackingService { public void unpackFile(final String compressedFilePath, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { final List<String> zipFiles = new ArrayList<>(); ZipUtil.unpack(new File(compressedFilePath), new File(destinatio...
@Test public void shouldUnpackTheZipFilesWithNestedFoldersRecursively() throws CompressionExtensionNotRecognizedException, IOException { zipUnpackingService.unpackFile(DESTINATION_DIR + FILE_NAME2 + ZIP_EXTENSION, DESTINATION_DIR); Collection files = getXMLFiles(DESTINATION_DIR + DEFAULT_DESTINATION_NAME); assertNotNul...
public void unpackFile(final String compressedFilePath, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { final List<String> zipFiles = new ArrayList<>(); ZipUtil.unpack(new File(compressedFilePath), new File(destinationFolder), new NameMapper() { public String map(String ...
ZipUnpackingService implements FileUnpackingService { public void unpackFile(final String compressedFilePath, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { final List<String> zipFiles = new ArrayList<>(); ZipUtil.unpack(new File(compressedFilePath), new File(destinatio...
ZipUnpackingService implements FileUnpackingService { public void unpackFile(final String compressedFilePath, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { final List<String> zipFiles = new ArrayList<>(); ZipUtil.unpack(new File(compressedFilePath), new File(destinatio...
ZipUnpackingService implements FileUnpackingService { public void unpackFile(final String compressedFilePath, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { final List<String> zipFiles = new ArrayList<>(); ZipUtil.unpack(new File(compressedFilePath), new File(destinatio...
ZipUnpackingService implements FileUnpackingService { public void unpackFile(final String compressedFilePath, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { final List<String> zipFiles = new ArrayList<>(); ZipUtil.unpack(new File(compressedFilePath), new File(destinatio...
@Test public void shouldUnpackTheZipFilesWithNestedMixedCompressedFiles() throws CompressionExtensionNotRecognizedException, IOException { zipUnpackingService.unpackFile(DESTINATION_DIR + FILE_NAME3 + ZIP_EXTENSION, DESTINATION_DIR); Collection files = getXMLFiles(DESTINATION_DIR + DEFAULT_DESTINATION_NAME); assertNotN...
public void unpackFile(final String compressedFilePath, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { final List<String> zipFiles = new ArrayList<>(); ZipUtil.unpack(new File(compressedFilePath), new File(destinationFolder), new NameMapper() { public String map(String ...
ZipUnpackingService implements FileUnpackingService { public void unpackFile(final String compressedFilePath, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { final List<String> zipFiles = new ArrayList<>(); ZipUtil.unpack(new File(compressedFilePath), new File(destinatio...
ZipUnpackingService implements FileUnpackingService { public void unpackFile(final String compressedFilePath, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { final List<String> zipFiles = new ArrayList<>(); ZipUtil.unpack(new File(compressedFilePath), new File(destinatio...
ZipUnpackingService implements FileUnpackingService { public void unpackFile(final String compressedFilePath, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { final List<String> zipFiles = new ArrayList<>(); ZipUtil.unpack(new File(compressedFilePath), new File(destinatio...
ZipUnpackingService implements FileUnpackingService { public void unpackFile(final String compressedFilePath, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { final List<String> zipFiles = new ArrayList<>(); ZipUtil.unpack(new File(compressedFilePath), new File(destinatio...
@Test public void shouldUnpackTheTarGzFilesRecursively() throws CompressionExtensionNotRecognizedException, IOException { gzUnpackingService.unpackFile(DESTINATION_DIR + FILE_NAME + ".tar.gz", DESTINATION_DIR); Collection files = getXMLFiles(DESTINATION_DIR + FILE_NAME); assertNotNull(files); assertEquals(XML_FILES_COU...
public void unpackFile(final String zipFile, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { String[] extensions = CompressionFileExtension.getExtensionValues(); unpackFile(zipFile, destinationFolder, extensions); }
GzUnpackingService implements FileUnpackingService { public void unpackFile(final String zipFile, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { String[] extensions = CompressionFileExtension.getExtensionValues(); unpackFile(zipFile, destinationFolder, extensions); } }
GzUnpackingService implements FileUnpackingService { public void unpackFile(final String zipFile, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { String[] extensions = CompressionFileExtension.getExtensionValues(); unpackFile(zipFile, destinationFolder, extensions); } }
GzUnpackingService implements FileUnpackingService { public void unpackFile(final String zipFile, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { String[] extensions = CompressionFileExtension.getExtensionValues(); unpackFile(zipFile, destinationFolder, extensions); } v...
GzUnpackingService implements FileUnpackingService { public void unpackFile(final String zipFile, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { String[] extensions = CompressionFileExtension.getExtensionValues(); unpackFile(zipFile, destinationFolder, extensions); } v...
@Test public void shouldUnpackTheTarGzFilesRecursivelyWithCompressedXMLFiles() throws CompressionExtensionNotRecognizedException, IOException { gzUnpackingService.unpackFile(DESTINATION_DIR + FILE_NAME2 + ".tar.gz", DESTINATION_DIR); Collection files = getXMLFiles(DESTINATION_DIR + FILE_NAME2); assertNotNull(files); as...
public void unpackFile(final String zipFile, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { String[] extensions = CompressionFileExtension.getExtensionValues(); unpackFile(zipFile, destinationFolder, extensions); }
GzUnpackingService implements FileUnpackingService { public void unpackFile(final String zipFile, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { String[] extensions = CompressionFileExtension.getExtensionValues(); unpackFile(zipFile, destinationFolder, extensions); } }
GzUnpackingService implements FileUnpackingService { public void unpackFile(final String zipFile, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { String[] extensions = CompressionFileExtension.getExtensionValues(); unpackFile(zipFile, destinationFolder, extensions); } }
GzUnpackingService implements FileUnpackingService { public void unpackFile(final String zipFile, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { String[] extensions = CompressionFileExtension.getExtensionValues(); unpackFile(zipFile, destinationFolder, extensions); } v...
GzUnpackingService implements FileUnpackingService { public void unpackFile(final String zipFile, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { String[] extensions = CompressionFileExtension.getExtensionValues(); unpackFile(zipFile, destinationFolder, extensions); } v...
@Test public void shouldUnpackTheTGZFilesRecursivelyWithCompressedXMLFiles() throws CompressionExtensionNotRecognizedException, IOException { gzUnpackingService.unpackFile(DESTINATION_DIR + FILE_NAME2 + ".tgz", DESTINATION_DIR); Collection files = getXMLFiles(DESTINATION_DIR + FILE_NAME2); assertNotNull(files); assertE...
public void unpackFile(final String zipFile, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { String[] extensions = CompressionFileExtension.getExtensionValues(); unpackFile(zipFile, destinationFolder, extensions); }
GzUnpackingService implements FileUnpackingService { public void unpackFile(final String zipFile, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { String[] extensions = CompressionFileExtension.getExtensionValues(); unpackFile(zipFile, destinationFolder, extensions); } }
GzUnpackingService implements FileUnpackingService { public void unpackFile(final String zipFile, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { String[] extensions = CompressionFileExtension.getExtensionValues(); unpackFile(zipFile, destinationFolder, extensions); } }
GzUnpackingService implements FileUnpackingService { public void unpackFile(final String zipFile, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { String[] extensions = CompressionFileExtension.getExtensionValues(); unpackFile(zipFile, destinationFolder, extensions); } v...
GzUnpackingService implements FileUnpackingService { public void unpackFile(final String zipFile, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { String[] extensions = CompressionFileExtension.getExtensionValues(); unpackFile(zipFile, destinationFolder, extensions); } v...
@Test public void shouldUnpackTheTarGzFilesRecursivelyWithMixedNestedCompressedFiles() throws CompressionExtensionNotRecognizedException, IOException { gzUnpackingService.unpackFile(DESTINATION_DIR + FILE_NAME3 + ".tar.gz", DESTINATION_DIR); Collection files = getXMLFiles(DESTINATION_DIR + FILE_NAME3); assertNotNull(fi...
public void unpackFile(final String zipFile, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { String[] extensions = CompressionFileExtension.getExtensionValues(); unpackFile(zipFile, destinationFolder, extensions); }
GzUnpackingService implements FileUnpackingService { public void unpackFile(final String zipFile, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { String[] extensions = CompressionFileExtension.getExtensionValues(); unpackFile(zipFile, destinationFolder, extensions); } }
GzUnpackingService implements FileUnpackingService { public void unpackFile(final String zipFile, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { String[] extensions = CompressionFileExtension.getExtensionValues(); unpackFile(zipFile, destinationFolder, extensions); } }
GzUnpackingService implements FileUnpackingService { public void unpackFile(final String zipFile, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { String[] extensions = CompressionFileExtension.getExtensionValues(); unpackFile(zipFile, destinationFolder, extensions); } v...
GzUnpackingService implements FileUnpackingService { public void unpackFile(final String zipFile, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { String[] extensions = CompressionFileExtension.getExtensionValues(); unpackFile(zipFile, destinationFolder, extensions); } v...
@Test public void shouldReturnZipService() throws CompressionExtensionNotRecognizedException { FileUnpackingService fileUnpackingService = UnpackingServiceFactory.createUnpackingService(ZIP_EXTENSION); assertTrue(fileUnpackingService instanceof ZipUnpackingService); }
public static FileUnpackingService createUnpackingService(String compressingExtension) throws CompressionExtensionNotRecognizedException { if (compressingExtension.equals(CompressionFileExtension.ZIP.getExtension())) return ZIP_UNPACKING_SERVICE; else if (compressingExtension.equals(CompressionFileExtension.GZIP.getExt...
UnpackingServiceFactory { public static FileUnpackingService createUnpackingService(String compressingExtension) throws CompressionExtensionNotRecognizedException { if (compressingExtension.equals(CompressionFileExtension.ZIP.getExtension())) return ZIP_UNPACKING_SERVICE; else if (compressingExtension.equals(Compressio...
UnpackingServiceFactory { public static FileUnpackingService createUnpackingService(String compressingExtension) throws CompressionExtensionNotRecognizedException { if (compressingExtension.equals(CompressionFileExtension.ZIP.getExtension())) return ZIP_UNPACKING_SERVICE; else if (compressingExtension.equals(Compressio...
UnpackingServiceFactory { public static FileUnpackingService createUnpackingService(String compressingExtension) throws CompressionExtensionNotRecognizedException { if (compressingExtension.equals(CompressionFileExtension.ZIP.getExtension())) return ZIP_UNPACKING_SERVICE; else if (compressingExtension.equals(Compressio...
UnpackingServiceFactory { public static FileUnpackingService createUnpackingService(String compressingExtension) throws CompressionExtensionNotRecognizedException { if (compressingExtension.equals(CompressionFileExtension.ZIP.getExtension())) return ZIP_UNPACKING_SERVICE; else if (compressingExtension.equals(Compressio...
@Test public void shouldReturnGZipService() throws CompressionExtensionNotRecognizedException { FileUnpackingService fileUnpackingService = UnpackingServiceFactory.createUnpackingService(GZIP_EXTENSION); assertTrue(fileUnpackingService instanceof GzUnpackingService); }
public static FileUnpackingService createUnpackingService(String compressingExtension) throws CompressionExtensionNotRecognizedException { if (compressingExtension.equals(CompressionFileExtension.ZIP.getExtension())) return ZIP_UNPACKING_SERVICE; else if (compressingExtension.equals(CompressionFileExtension.GZIP.getExt...
UnpackingServiceFactory { public static FileUnpackingService createUnpackingService(String compressingExtension) throws CompressionExtensionNotRecognizedException { if (compressingExtension.equals(CompressionFileExtension.ZIP.getExtension())) return ZIP_UNPACKING_SERVICE; else if (compressingExtension.equals(Compressio...
UnpackingServiceFactory { public static FileUnpackingService createUnpackingService(String compressingExtension) throws CompressionExtensionNotRecognizedException { if (compressingExtension.equals(CompressionFileExtension.ZIP.getExtension())) return ZIP_UNPACKING_SERVICE; else if (compressingExtension.equals(Compressio...
UnpackingServiceFactory { public static FileUnpackingService createUnpackingService(String compressingExtension) throws CompressionExtensionNotRecognizedException { if (compressingExtension.equals(CompressionFileExtension.ZIP.getExtension())) return ZIP_UNPACKING_SERVICE; else if (compressingExtension.equals(Compressio...
UnpackingServiceFactory { public static FileUnpackingService createUnpackingService(String compressingExtension) throws CompressionExtensionNotRecognizedException { if (compressingExtension.equals(CompressionFileExtension.ZIP.getExtension())) return ZIP_UNPACKING_SERVICE; else if (compressingExtension.equals(Compressio...
@Test(expected = Exception.class) public void shouldRetry5TimesBeforeFailingWhileRemovingStatistics() { doThrow(Exception.class).when(cassandraNodeStatisticsDAO).removeStatistics(eq(TASK_ID)); removerImpl.removeStatistics(TASK_ID); verify(cassandraNodeStatisticsDAO, times(6)).removeStatistics((eq(TASK_ID))); }
@Override public void removeStatistics(long taskId) { int retries = DEFAULT_RETRIES; while (true) { try { cassandraNodeStatisticsDAO.removeStatistics(taskId); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while removing the validation statistics. Retries left: " + retries); waitForTheNextCall()...
RemoverImpl implements Remover { @Override public void removeStatistics(long taskId) { int retries = DEFAULT_RETRIES; while (true) { try { cassandraNodeStatisticsDAO.removeStatistics(taskId); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while removing the validation statistics. Retries left: "...
RemoverImpl implements Remover { @Override public void removeStatistics(long taskId) { int retries = DEFAULT_RETRIES; while (true) { try { cassandraNodeStatisticsDAO.removeStatistics(taskId); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while removing the validation statistics. Retries left: "...
RemoverImpl implements Remover { @Override public void removeStatistics(long taskId) { int retries = DEFAULT_RETRIES; while (true) { try { cassandraNodeStatisticsDAO.removeStatistics(taskId); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while removing the validation statistics. Retries left: "...
RemoverImpl implements Remover { @Override public void removeStatistics(long taskId) { int retries = DEFAULT_RETRIES; while (true) { try { cassandraNodeStatisticsDAO.removeStatistics(taskId); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while removing the validation statistics. Retries left: "...
@Test public void shouldReturnGZipServiceFotTGZExtension() throws CompressionExtensionNotRecognizedException { FileUnpackingService fileUnpackingService = UnpackingServiceFactory.createUnpackingService(TGZIP_EXTENSION); assertTrue(fileUnpackingService instanceof GzUnpackingService); }
public static FileUnpackingService createUnpackingService(String compressingExtension) throws CompressionExtensionNotRecognizedException { if (compressingExtension.equals(CompressionFileExtension.ZIP.getExtension())) return ZIP_UNPACKING_SERVICE; else if (compressingExtension.equals(CompressionFileExtension.GZIP.getExt...
UnpackingServiceFactory { public static FileUnpackingService createUnpackingService(String compressingExtension) throws CompressionExtensionNotRecognizedException { if (compressingExtension.equals(CompressionFileExtension.ZIP.getExtension())) return ZIP_UNPACKING_SERVICE; else if (compressingExtension.equals(Compressio...
UnpackingServiceFactory { public static FileUnpackingService createUnpackingService(String compressingExtension) throws CompressionExtensionNotRecognizedException { if (compressingExtension.equals(CompressionFileExtension.ZIP.getExtension())) return ZIP_UNPACKING_SERVICE; else if (compressingExtension.equals(Compressio...
UnpackingServiceFactory { public static FileUnpackingService createUnpackingService(String compressingExtension) throws CompressionExtensionNotRecognizedException { if (compressingExtension.equals(CompressionFileExtension.ZIP.getExtension())) return ZIP_UNPACKING_SERVICE; else if (compressingExtension.equals(Compressio...
UnpackingServiceFactory { public static FileUnpackingService createUnpackingService(String compressingExtension) throws CompressionExtensionNotRecognizedException { if (compressingExtension.equals(CompressionFileExtension.ZIP.getExtension())) return ZIP_UNPACKING_SERVICE; else if (compressingExtension.equals(Compressio...
@Test(expected = CompressionExtensionNotRecognizedException.class) public void shouldThrowExceptionIfTheExTensionWasNotRecognized() throws CompressionExtensionNotRecognizedException { UnpackingServiceFactory.createUnpackingService(UNDEFINED_COMPRESSION_EXTENSION); }
public static FileUnpackingService createUnpackingService(String compressingExtension) throws CompressionExtensionNotRecognizedException { if (compressingExtension.equals(CompressionFileExtension.ZIP.getExtension())) return ZIP_UNPACKING_SERVICE; else if (compressingExtension.equals(CompressionFileExtension.GZIP.getExt...
UnpackingServiceFactory { public static FileUnpackingService createUnpackingService(String compressingExtension) throws CompressionExtensionNotRecognizedException { if (compressingExtension.equals(CompressionFileExtension.ZIP.getExtension())) return ZIP_UNPACKING_SERVICE; else if (compressingExtension.equals(Compressio...
UnpackingServiceFactory { public static FileUnpackingService createUnpackingService(String compressingExtension) throws CompressionExtensionNotRecognizedException { if (compressingExtension.equals(CompressionFileExtension.ZIP.getExtension())) return ZIP_UNPACKING_SERVICE; else if (compressingExtension.equals(Compressio...
UnpackingServiceFactory { public static FileUnpackingService createUnpackingService(String compressingExtension) throws CompressionExtensionNotRecognizedException { if (compressingExtension.equals(CompressionFileExtension.ZIP.getExtension())) return ZIP_UNPACKING_SERVICE; else if (compressingExtension.equals(Compressio...
UnpackingServiceFactory { public static FileUnpackingService createUnpackingService(String compressingExtension) throws CompressionExtensionNotRecognizedException { if (compressingExtension.equals(CompressionFileExtension.ZIP.getExtension())) return ZIP_UNPACKING_SERVICE; else if (compressingExtension.equals(Compressio...
@Test public void executeBolt() throws IOException { Tuple anchorTuple = mock(TupleImpl.class); StormTaskTuple tuple = new StormTaskTuple(TASK_ID, TASK_NAME, SOURCE_VERSION_URL, readMockContentOfURL(sampleXmlFileName), prepareStormTaskTupleParameters(sampleXsltFileName), new Revision()); xsltBolt.execute(anchorTuple, t...
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { StringWriter writer = null; try { final String fileUrl = stormTaskTuple.getFileUrl(); final String xsltUrl = stormTaskTuple.getParameter(PluginParameterKeys.XSLT_URL); LOGGER.info("Processing file: {} with xslt schema:{}", fileUrl, xsltUr...
XsltBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { StringWriter writer = null; try { final String fileUrl = stormTaskTuple.getFileUrl(); final String xsltUrl = stormTaskTuple.getParameter(PluginParameterKeys.XSLT_URL); LOGGER.info("Processing file: {} wi...
XsltBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { StringWriter writer = null; try { final String fileUrl = stormTaskTuple.getFileUrl(); final String xsltUrl = stormTaskTuple.getParameter(PluginParameterKeys.XSLT_URL); LOGGER.info("Processing file: {} wi...
XsltBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { StringWriter writer = null; try { final String fileUrl = stormTaskTuple.getFileUrl(); final String xsltUrl = stormTaskTuple.getParameter(PluginParameterKeys.XSLT_URL); LOGGER.info("Processing file: {} wi...
XsltBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { StringWriter writer = null; try { final String fileUrl = stormTaskTuple.getFileUrl(); final String xsltUrl = stormTaskTuple.getParameter(PluginParameterKeys.XSLT_URL); LOGGER.info("Processing file: {} wi...
@Test public void executeBoltWithInjection() throws IOException { Tuple anchorTuple = mock(TupleImpl.class); HashMap<String, String> parameters = prepareStormTaskTupleParameters(injectNodeXsltFileName); parameters.put(PluginParameterKeys.METIS_DATASET_ID, EXAMPLE_METIS_DATASET_ID); StormTaskTuple tuple = new StormTaskT...
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { StringWriter writer = null; try { final String fileUrl = stormTaskTuple.getFileUrl(); final String xsltUrl = stormTaskTuple.getParameter(PluginParameterKeys.XSLT_URL); LOGGER.info("Processing file: {} with xslt schema:{}", fileUrl, xsltUr...
XsltBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { StringWriter writer = null; try { final String fileUrl = stormTaskTuple.getFileUrl(); final String xsltUrl = stormTaskTuple.getParameter(PluginParameterKeys.XSLT_URL); LOGGER.info("Processing file: {} wi...
XsltBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { StringWriter writer = null; try { final String fileUrl = stormTaskTuple.getFileUrl(); final String xsltUrl = stormTaskTuple.getParameter(PluginParameterKeys.XSLT_URL); LOGGER.info("Processing file: {} wi...
XsltBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { StringWriter writer = null; try { final String fileUrl = stormTaskTuple.getFileUrl(); final String xsltUrl = stormTaskTuple.getParameter(PluginParameterKeys.XSLT_URL); LOGGER.info("Processing file: {} wi...
XsltBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { StringWriter writer = null; try { final String fileUrl = stormTaskTuple.getFileUrl(); final String xsltUrl = stormTaskTuple.getParameter(PluginParameterKeys.XSLT_URL); LOGGER.info("Processing file: {} wi...
@Test public void shouldIndexFileForPreviewEnv() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); String targetIndexingEnv = "PREVIEW"; StormTaskTuple tuple = mockStormTupleFor(targetIndexingEnv); mockIndexerFactoryFor(null); indexingBolt.execute(anchorTuple, tuple); Mockito.verify(outputCollector, Mockito...
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); final String database = stormTaskTuple .get...
IndexingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); fina...
IndexingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); fina...
IndexingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); fina...
IndexingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); fina...
@Test public void shouldIndexFilePublishEnv() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); String targetIndexingEnv = "PUBLISH"; StormTaskTuple tuple = mockStormTupleFor(targetIndexingEnv); mockIndexerFactoryFor(null); indexingBolt.execute(anchorTuple, tuple); Mockito.verify(outputCollector, Mockito.ti...
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); final String database = stormTaskTuple .get...
IndexingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); fina...
IndexingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); fina...
IndexingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); fina...
IndexingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); fina...
@Test public void shouldEmitErrorNotificationForIndexerConfiguration() throws IndexingException { Tuple anchorTuple = mock(TupleImpl.class); StormTaskTuple tuple = mockStormTupleFor("PREVIEW"); mockIndexerFactoryFor(IndexerRelatedIndexingException.class); indexingBolt.execute(anchorTuple, tuple); Mockito.verify(outputC...
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); final String database = stormTaskTuple .get...
IndexingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); fina...
IndexingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); fina...
IndexingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); fina...
IndexingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); fina...
@Test public void shouldEmitErrorNotificationForIndexing() throws IndexingException { Tuple anchorTuple = mock(TupleImpl.class); StormTaskTuple tuple = mockStormTupleFor("PUBLISH"); mockIndexerFactoryFor(IndexerRelatedIndexingException.class); indexingBolt.execute(anchorTuple, tuple); Mockito.verify(outputCollector, Mo...
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); final String database = stormTaskTuple .get...
IndexingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); fina...
IndexingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); fina...
IndexingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); fina...
IndexingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); fina...
@Test public void shouldThrowExceptionWhenDateIsUnParsable() throws IndexingException { Tuple anchorTuple = mock(TupleImpl.class); StormTaskTuple tuple = mockStormTupleFor("PREVIEW"); tuple.getParameters().remove(PluginParameterKeys.METIS_RECORD_DATE); tuple.addParameter(PluginParameterKeys.METIS_RECORD_DATE, "UN_PARSA...
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); final String database = stormTaskTuple .get...
IndexingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); fina...
IndexingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); fina...
IndexingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); fina...
IndexingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); fina...
@Test public void shouldThrowExceptionForUnknownEnv() throws IndexingException { Tuple anchorTuple = mock(TupleImpl.class); StormTaskTuple tuple = mockStormTupleFor("UNKNOWN_ENVIRONMENT"); mockIndexerFactoryFor(RuntimeException.class); indexingBolt.execute(anchorTuple, tuple); Mockito.verify(outputCollector, Mockito.ti...
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); final String database = stormTaskTuple .get...
IndexingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); fina...
IndexingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); fina...
IndexingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); fina...
IndexingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); fina...
@Test public void shouldInvokeAllTheRemovalStepsIncludingErrorReports() { removerInvoker.executeInvokerForSingleTask(TASK_ID, true); verify(remover, times(1)).removeNotifications((eq(TASK_ID))); verify(remover, times(1)).removeStatistics((eq(TASK_ID))); verify(remover, times(1)).removeErrorReports((eq(TASK_ID))); }
public void executeInvokerForSingleTask(long taskId, boolean shouldRemoveErrors) { remover.removeNotifications(taskId); LOGGER.info("Logs for task Id:" + taskId + " were removed successfully"); LOGGER.info("Removing statistics for:" + taskId + " was started. This step could take times depending on the size of the task"...
RemoverInvoker { public void executeInvokerForSingleTask(long taskId, boolean shouldRemoveErrors) { remover.removeNotifications(taskId); LOGGER.info("Logs for task Id:" + taskId + " were removed successfully"); LOGGER.info("Removing statistics for:" + taskId + " was started. This step could take times depending on the ...
RemoverInvoker { public void executeInvokerForSingleTask(long taskId, boolean shouldRemoveErrors) { remover.removeNotifications(taskId); LOGGER.info("Logs for task Id:" + taskId + " were removed successfully"); LOGGER.info("Removing statistics for:" + taskId + " was started. This step could take times depending on the ...
RemoverInvoker { public void executeInvokerForSingleTask(long taskId, boolean shouldRemoveErrors) { remover.removeNotifications(taskId); LOGGER.info("Logs for task Id:" + taskId + " were removed successfully"); LOGGER.info("Removing statistics for:" + taskId + " was started. This step could take times depending on the ...
RemoverInvoker { public void executeInvokerForSingleTask(long taskId, boolean shouldRemoveErrors) { remover.removeNotifications(taskId); LOGGER.info("Logs for task Id:" + taskId + " were removed successfully"); LOGGER.info("Removing statistics for:" + taskId + " was started. This step could take times depending on the ...
@Test public void testImportTopic() throws Exception { List<String> topics = setupTopic(zkClient, TEST_TOPIC_NAME); AtlasEntity.AtlasEntityWithExtInfo atlasEntityWithExtInfo = new AtlasEntity.AtlasEntityWithExtInfo( getTopicEntityWithGuid("0dd466a4-3838-4537-8969-6abb8b9e9185")); KafkaBridge kafkaBridge = mock(KafkaBri...
public void importTopic(String topicToImport) throws Exception { List<String> topics = availableTopics; if (StringUtils.isNotEmpty(topicToImport)) { List<String> topics_subset = new ArrayList<>(); for(String topic : topics) { if (Pattern.compile(topicToImport).matcher(topic).matches()) { topics_subset.add(topic); } } t...
KafkaBridge { public void importTopic(String topicToImport) throws Exception { List<String> topics = availableTopics; if (StringUtils.isNotEmpty(topicToImport)) { List<String> topics_subset = new ArrayList<>(); for(String topic : topics) { if (Pattern.compile(topicToImport).matcher(topic).matches()) { topics_subset.add...
KafkaBridge { public void importTopic(String topicToImport) throws Exception { List<String> topics = availableTopics; if (StringUtils.isNotEmpty(topicToImport)) { List<String> topics_subset = new ArrayList<>(); for(String topic : topics) { if (Pattern.compile(topicToImport).matcher(topic).matches()) { topics_subset.add...
KafkaBridge { public void importTopic(String topicToImport) throws Exception { List<String> topics = availableTopics; if (StringUtils.isNotEmpty(topicToImport)) { List<String> topics_subset = new ArrayList<>(); for(String topic : topics) { if (Pattern.compile(topicToImport).matcher(topic).matches()) { topics_subset.add...
KafkaBridge { public void importTopic(String topicToImport) throws Exception { List<String> topics = availableTopics; if (StringUtils.isNotEmpty(topicToImport)) { List<String> topics_subset = new ArrayList<>(); for(String topic : topics) { if (Pattern.compile(topicToImport).matcher(topic).matches()) { topics_subset.add...
@Test public void ALLEntityType() throws AtlasBaseException { SearchParameters params = new SearchParameters(); params.setTypeName(SearchParameters.ALL_ENTITY_TYPES); params.setLimit(20); SearchContext context = new SearchContext(params, typeRegistry, graph, Collections.<String>emptySet()); EntitySearchProcessor proces...
@Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySear...
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = Atl...
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = Atl...
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = Atl...
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = Atl...
@Test public void ALLEntityTypeWithTag() throws AtlasBaseException { SearchParameters params = new SearchParameters(); params.setTypeName(SearchParameters.ALL_ENTITY_TYPES); params.setClassification(FACT_CLASSIFICATION); params.setLimit(20); SearchContext context = new SearchContext(params, typeRegistry, graph, Collect...
@Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySear...
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = Atl...
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = Atl...
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = Atl...
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = Atl...
@Test public void entityType() throws AtlasBaseException { SearchParameters params = new SearchParameters(); params.setTypeName(DATABASE_TYPE); params.setLimit(20); SearchContext context = new SearchContext(params, typeRegistry, graph, Collections.<String>emptySet()); EntitySearchProcessor processor = new EntitySearchP...
@Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySear...
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = Atl...
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = Atl...
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = Atl...
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = Atl...
@Test public void entityTypes() throws AtlasBaseException { SearchParameters params = new SearchParameters(); params.setTypeName(DATABASE_TYPE+","+HIVE_TABLE_TYPE); params.setLimit(20); SearchContext context = new SearchContext(params, typeRegistry, graph, Collections.<String>emptySet()); EntitySearchProcessor processo...
@Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySear...
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = Atl...
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = Atl...
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = Atl...
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = Atl...
@Test public void entityTypesAndTag() throws AtlasBaseException { SearchParameters params = new SearchParameters(); params.setTypeName(DATABASE_TYPE+","+HIVE_TABLE_TYPE); params.setClassification(FACT_CLASSIFICATION); params.setLimit(20); SearchContext context = new SearchContext(params, typeRegistry, graph, Collection...
@Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySear...
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = Atl...
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = Atl...
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = Atl...
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = Atl...
@Test public void searchWithEntityTypesAndEntityFilters() throws AtlasBaseException { SearchParameters params = new SearchParameters(); params.setTypeName(DATABASE_TYPE+","+HIVE_TABLE_TYPE); SearchParameters.FilterCriteria filterCriteria = getSingleFilterCondition("owner", SearchParameters.Operator.CONTAINS, "ETL"); pa...
@Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySear...
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = Atl...
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = Atl...
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = Atl...
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = Atl...
@Test public void searchWithEntityTypesAndEntityFiltersAndTag() throws AtlasBaseException { SearchParameters params = new SearchParameters(); params.setTypeName(DATABASE_TYPE+","+HIVE_TABLE_TYPE); SearchParameters.FilterCriteria filterCriteria = getSingleFilterCondition("owner", SearchParameters.Operator.CONTAINS, "ETL...
@Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySear...
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = Atl...
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = Atl...
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = Atl...
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = Atl...
@Test public void searchWithNotContains_stringAttr() throws AtlasBaseException { SearchParameters params = new SearchParameters(); params.setTypeName(HIVE_TABLE_TYPE); SearchParameters.FilterCriteria filterCriteria = getSingleFilterCondition("tableType", SearchParameters.Operator.NOT_CONTAINS, "Managed"); params.setEnt...
@Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySear...
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = Atl...
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = Atl...
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = Atl...
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = Atl...
@Test public void searchWithNotContains_pipeSeperatedAttr() throws AtlasBaseException { SearchParameters params = new SearchParameters(); params.setTypeName(HIVE_TABLE_TYPE); SearchParameters.FilterCriteria filterCriteria = getSingleFilterCondition("__classificationNames", SearchParameters.Operator.NOT_CONTAINS, METRIC...
@Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySear...
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = Atl...
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = Atl...
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = Atl...
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = Atl...
@Test public void testGetMetrics() { AtlasMetrics metrics = metricsService.getMetrics(); assertNotNull(metrics); assertEquals(metrics.getNumericMetric(GENERAL, METRIC_ENTITY_COUNT).intValue(), 43); assertEquals(metrics.getNumericMetric(GENERAL, METRIC_TAG_COUNT).intValue(), 1); assertTrue(metrics.getNumericMetric(GENER...
@SuppressWarnings("unchecked") @GraphTransaction public AtlasMetrics getMetrics() { final AtlasTypesDef typesDef = getTypesDef(); Collection<AtlasEntityDef> entityDefs = typesDef.getEntityDefs(); Collection<AtlasClassificationDef> classificationDefs = typesDef.getClassificationDefs(); Map<String, Long> activeEntityCoun...
MetricsService { @SuppressWarnings("unchecked") @GraphTransaction public AtlasMetrics getMetrics() { final AtlasTypesDef typesDef = getTypesDef(); Collection<AtlasEntityDef> entityDefs = typesDef.getEntityDefs(); Collection<AtlasClassificationDef> classificationDefs = typesDef.getClassificationDefs(); Map<String, Long>...
MetricsService { @SuppressWarnings("unchecked") @GraphTransaction public AtlasMetrics getMetrics() { final AtlasTypesDef typesDef = getTypesDef(); Collection<AtlasEntityDef> entityDefs = typesDef.getEntityDefs(); Collection<AtlasClassificationDef> classificationDefs = typesDef.getClassificationDefs(); Map<String, Long>...
MetricsService { @SuppressWarnings("unchecked") @GraphTransaction public AtlasMetrics getMetrics() { final AtlasTypesDef typesDef = getTypesDef(); Collection<AtlasEntityDef> entityDefs = typesDef.getEntityDefs(); Collection<AtlasClassificationDef> classificationDefs = typesDef.getClassificationDefs(); Map<String, Long>...
MetricsService { @SuppressWarnings("unchecked") @GraphTransaction public AtlasMetrics getMetrics() { final AtlasTypesDef typesDef = getTypesDef(); Collection<AtlasEntityDef> entityDefs = typesDef.getEntityDefs(); Collection<AtlasClassificationDef> classificationDefs = typesDef.getClassificationDefs(); Map<String, Long>...
@Test public void testMapTypeIsValidValue() { for (Object value : validValues) { assertTrue(intIntMapType.isValidValue(value), "value=" + value); } for (Object value : invalidValues) { assertFalse(intIntMapType.isValidValue(value), "value=" + value); } }
@Override public boolean isValidValue(Object obj) { if (obj != null) { if (obj instanceof Map) { Map<Object, Objects> map = (Map<Object, Objects>) obj; for (Map.Entry e : map.entrySet()) { if (!keyType.isValidValue(e.getKey()) || !valueType.isValidValue(e.getValue())) { return false; } } } else { return false; } } retu...
AtlasMapType extends AtlasType { @Override public boolean isValidValue(Object obj) { if (obj != null) { if (obj instanceof Map) { Map<Object, Objects> map = (Map<Object, Objects>) obj; for (Map.Entry e : map.entrySet()) { if (!keyType.isValidValue(e.getKey()) || !valueType.isValidValue(e.getValue())) { return false; } ...
AtlasMapType extends AtlasType { @Override public boolean isValidValue(Object obj) { if (obj != null) { if (obj instanceof Map) { Map<Object, Objects> map = (Map<Object, Objects>) obj; for (Map.Entry e : map.entrySet()) { if (!keyType.isValidValue(e.getKey()) || !valueType.isValidValue(e.getValue())) { return false; } ...
AtlasMapType extends AtlasType { @Override public boolean isValidValue(Object obj) { if (obj != null) { if (obj instanceof Map) { Map<Object, Objects> map = (Map<Object, Objects>) obj; for (Map.Entry e : map.entrySet()) { if (!keyType.isValidValue(e.getKey()) || !valueType.isValidValue(e.getValue())) { return false; } ...
AtlasMapType extends AtlasType { @Override public boolean isValidValue(Object obj) { if (obj != null) { if (obj instanceof Map) { Map<Object, Objects> map = (Map<Object, Objects>) obj; for (Map.Entry e : map.entrySet()) { if (!keyType.isValidValue(e.getKey()) || !valueType.isValidValue(e.getValue())) { return false; } ...
@Test(dependsOnMethods = "filterInternalType") public void createsNewProfile() throws AtlasBaseException { for (int i = 0; i < NUM_USERS; i++) { AtlasUserProfile expected = getAtlasUserProfile(i); AtlasUserProfile actual = userProfileService.saveUserProfile(expected); assertNotNull(actual); assertEquals(expected.getNam...
public AtlasUserProfile saveUserProfile(AtlasUserProfile profile) throws AtlasBaseException { return dataAccess.save(profile); }
UserProfileService { public AtlasUserProfile saveUserProfile(AtlasUserProfile profile) throws AtlasBaseException { return dataAccess.save(profile); } }
UserProfileService { public AtlasUserProfile saveUserProfile(AtlasUserProfile profile) throws AtlasBaseException { return dataAccess.save(profile); } @Inject UserProfileService(DataAccess dataAccess); }
UserProfileService { public AtlasUserProfile saveUserProfile(AtlasUserProfile profile) throws AtlasBaseException { return dataAccess.save(profile); } @Inject UserProfileService(DataAccess dataAccess); AtlasUserProfile saveUserProfile(AtlasUserProfile profile); AtlasUserProfile getUserProfile(String userName); AtlasUse...
UserProfileService { public AtlasUserProfile saveUserProfile(AtlasUserProfile profile) throws AtlasBaseException { return dataAccess.save(profile); } @Inject UserProfileService(DataAccess dataAccess); AtlasUserProfile saveUserProfile(AtlasUserProfile profile); AtlasUserProfile getUserProfile(String userName); AtlasUse...
@Test(dependsOnMethods = "saveSearchesForUser", expectedExceptions = AtlasBaseException.class) public void attemptToAddExistingSearch() throws AtlasBaseException { String userName = getIndexBasedUserName(0); SearchParameters expectedSearchParameter = getActualSearchParameters(); for (int j = 0; j < NUM_SEARCHES; j++) {...
public AtlasUserSavedSearch addSavedSearch(AtlasUserSavedSearch savedSearch) throws AtlasBaseException { String userName = savedSearch.getOwnerName(); AtlasUserProfile userProfile = null; try { userProfile = getUserProfile(userName); } catch (AtlasBaseException excp) { } if (userProfile == null) { userProfile = new Atl...
UserProfileService { public AtlasUserSavedSearch addSavedSearch(AtlasUserSavedSearch savedSearch) throws AtlasBaseException { String userName = savedSearch.getOwnerName(); AtlasUserProfile userProfile = null; try { userProfile = getUserProfile(userName); } catch (AtlasBaseException excp) { } if (userProfile == null) { ...
UserProfileService { public AtlasUserSavedSearch addSavedSearch(AtlasUserSavedSearch savedSearch) throws AtlasBaseException { String userName = savedSearch.getOwnerName(); AtlasUserProfile userProfile = null; try { userProfile = getUserProfile(userName); } catch (AtlasBaseException excp) { } if (userProfile == null) { ...
UserProfileService { public AtlasUserSavedSearch addSavedSearch(AtlasUserSavedSearch savedSearch) throws AtlasBaseException { String userName = savedSearch.getOwnerName(); AtlasUserProfile userProfile = null; try { userProfile = getUserProfile(userName); } catch (AtlasBaseException excp) { } if (userProfile == null) { ...
UserProfileService { public AtlasUserSavedSearch addSavedSearch(AtlasUserSavedSearch savedSearch) throws AtlasBaseException { String userName = savedSearch.getOwnerName(); AtlasUserProfile userProfile = null; try { userProfile = getUserProfile(userName); } catch (AtlasBaseException excp) { } if (userProfile == null) { ...
@Test(dependsOnMethods = "attemptToAddExistingSearch") public void verifySavedSearchesForUser() throws AtlasBaseException { String userName = getIndexBasedUserName(0); List<AtlasUserSavedSearch> searches = userProfileService.getSavedSearches(userName); List<String> names = getIndexBasedQueryNamesList(); for (int i = 0;...
public List<AtlasUserSavedSearch> getSavedSearches(String userName) throws AtlasBaseException { AtlasUserProfile profile = null; try { profile = getUserProfile(userName); } catch (AtlasBaseException excp) { } return (profile != null) ? profile.getSavedSearches() : null; }
UserProfileService { public List<AtlasUserSavedSearch> getSavedSearches(String userName) throws AtlasBaseException { AtlasUserProfile profile = null; try { profile = getUserProfile(userName); } catch (AtlasBaseException excp) { } return (profile != null) ? profile.getSavedSearches() : null; } }
UserProfileService { public List<AtlasUserSavedSearch> getSavedSearches(String userName) throws AtlasBaseException { AtlasUserProfile profile = null; try { profile = getUserProfile(userName); } catch (AtlasBaseException excp) { } return (profile != null) ? profile.getSavedSearches() : null; } @Inject UserProfileServic...
UserProfileService { public List<AtlasUserSavedSearch> getSavedSearches(String userName) throws AtlasBaseException { AtlasUserProfile profile = null; try { profile = getUserProfile(userName); } catch (AtlasBaseException excp) { } return (profile != null) ? profile.getSavedSearches() : null; } @Inject UserProfileServic...
UserProfileService { public List<AtlasUserSavedSearch> getSavedSearches(String userName) throws AtlasBaseException { AtlasUserProfile profile = null; try { profile = getUserProfile(userName); } catch (AtlasBaseException excp) { } return (profile != null) ? profile.getSavedSearches() : null; } @Inject UserProfileServic...