refactor(tests): convert groovy tests to java
kirangodishala committed Oct 16, 2024
1 parent 3f06b13 commit d076e91
Showing 5 changed files with 194 additions and 169 deletions.
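
For readers coming from the Groovy side, here is a minimal, purely illustrative sketch of the idiom being replaced. The deleted Groovy tests (collapsed in this view, and presumably Spock specifications) would express parameterized cases as a where: data table, which the new Java tests below express with JUnit 5 @ParameterizedTest and @CsvSource. The class, method, and data in this sketch are hypothetical and are not taken from the deleted files.

// Hypothetical Spock-style sketch (illustrative only; not the deleted source).
// Each data-table row corresponds to one @CsvSource entry in the Java version.
import spock.lang.Specification

class NameFilterSketchSpec extends Specification {

  def "pipeline name '#pipelineName' against filter '#filter'"() {
    expect:
    pipelineName.contains(filter) == matches

    where:
    pipelineName    | filter  | matches
    "pipelineNameA" | "NameA" | true
    "pipelineNameA" | "Other" | false
  }
}

In the Java files added below, the table rows become @CsvSource entries, and the stubbing a Spock spec would do with mock interactions is done with a Mockito spy and doReturn(...).when(...) in the setup() method.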

This file was deleted.

@@ -0,0 +1,118 @@
/*
* Copyright 2023 Salesforce, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/

package com.netflix.spinnaker.front50.pipeline;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.mockito.ArgumentMatchers.anyBoolean;
import static org.mockito.Mockito.*;

import com.netflix.spinnaker.front50.api.model.pipeline.Pipeline;
import com.netflix.spinnaker.front50.model.pipeline.DefaultPipelineDAO;
import com.netflix.spinnaker.kork.sql.test.SqlTestUtil;
import java.util.Collection;
import java.util.List;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.CsvSource;
import org.mockito.Mockito;

public abstract class DefaultPipelineDAOTest extends PipelineDAOSpec<DefaultPipelineDAO> {

  protected DefaultPipelineDAO pipelineDAO;

  @Override
  public DefaultPipelineDAO getInstance() {
    return getDefaultPipelineDAO();
  }

  public abstract DefaultPipelineDAO getDefaultPipelineDAO();

  @BeforeEach
  public void setup() {
    this.pipelineDAO = Mockito.spy(getDefaultPipelineDAO());
  }

  @ParameterizedTest
  @CsvSource({
    "'app', 'pipelineNameA', 'NameA', 'pipelineNameA'",
    "'app', 'pipelineNameA', , 'pipelineNameA'",
    "'app', , , "
  })
  public void shouldReturnCorrectPipelinesWhenRequestingPipelinesByApplicationWithNameFilter(
      String applicationName,
      String pipelineName,
      String pipelineNameFilter,
      String expectedPipelineName) {

    Pipeline pipeline = new Pipeline();
    pipeline.setId("0");
    pipeline.setApplication(applicationName);
    pipeline.setName(pipelineName);

    doReturn(List.of(pipeline)).when(pipelineDAO).all(anyBoolean());

    Collection<Pipeline> pipelines =
        pipelineDAO.getPipelinesByApplication("app", pipelineNameFilter, true);

    Pipeline resultPipeline = pipelines.iterator().next();
    assertEquals(resultPipeline.getName(), expectedPipelineName);
    assertEquals(resultPipeline.getApplication(), "app");
  }

  @ParameterizedTest
  @CsvSource({
    "'app', , 'NameA'",
    "'bad', 'pipelineNameA', 'NameA'",
    "'bad', , 'NameA'",
    "'bad', 'pipelineNameA', ",
    "'bad', , "
  })
  public void shouldReturnNoPipelinesWhenRequestingPipelinesByApplicationWithNameFilter(
      String applicationName, String pipelineName, String pipelineNameFilter) {

    Pipeline pipeline = new Pipeline();
    pipeline.setId("0");
    pipeline.setApplication(applicationName);
    pipeline.setName(pipelineName);

    doReturn(List.of(pipeline)).when(pipelineDAO).all(true);

    Collection<Pipeline> pipelines =
        pipelineDAO.getPipelinesByApplication("app", pipelineNameFilter, true);

    assertEquals(0, pipelines.size());
  }
}

class SqlDefaultPipelineDAOTest extends DefaultPipelineDAOTest {

  private SqlTestUtil.TestDatabase database = SqlTestUtil.initTcMysqlDatabase();

  @AfterEach
  public void cleanup() {
    if (database != null) {
      SqlTestUtil.cleanupDb(database.context);
    }
  }

  @Override
  public DefaultPipelineDAO getDefaultPipelineDAO() {
    return SqlPipelineDAOTestConfiguration.createPipelineDAO(database);
  }
}

This file was deleted.

@@ -0,0 +1,76 @@
/*
* Copyright 2023 Salesforce, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/

package com.netflix.spinnaker.front50.pipeline;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.netflix.spectator.api.NoopRegistry;
import com.netflix.spinnaker.config.Front50SqlProperties;
import com.netflix.spinnaker.front50.config.StorageServiceConfigurationProperties;
import com.netflix.spinnaker.front50.model.DefaultObjectKeyLoader;
import com.netflix.spinnaker.front50.model.SqlStorageService;
import com.netflix.spinnaker.front50.model.pipeline.DefaultPipelineDAO;
import com.netflix.spinnaker.kork.sql.config.SqlRetryProperties;
import com.netflix.spinnaker.kork.sql.test.SqlTestUtil;
import io.github.resilience4j.circuitbreaker.CircuitBreakerRegistry;
import java.time.Clock;
import java.util.concurrent.Executors;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import rx.Scheduler;
import rx.schedulers.Schedulers;

@Configuration
public class SqlPipelineDAOTestConfiguration {

  @Bean
  public static DefaultPipelineDAO createPipelineDAO(SqlTestUtil.TestDatabase database) {
    Scheduler scheduler = Schedulers.from(Executors.newFixedThreadPool(1));

    StorageServiceConfigurationProperties.PerObjectType pipelineDAOConfigProperties =
        new StorageServiceConfigurationProperties().getPipeline();

    SqlStorageService storageService =
        new SqlStorageService(
            new ObjectMapper(),
            new NoopRegistry(),
            database.context,
            Clock.systemDefaultZone(),
            new SqlRetryProperties(),
            1,
            "default",
            new Front50SqlProperties());

    // Configure PipelineDAO properties
    pipelineDAOConfigProperties.setRefreshMs(0);
    pipelineDAOConfigProperties.setShouldWarmCache(false);

    DefaultPipelineDAO pipelineDAO =
        new DefaultPipelineDAO(
            storageService,
            scheduler,
            new DefaultObjectKeyLoader(storageService),
            pipelineDAOConfigProperties,
            new NoopRegistry(),
            CircuitBreakerRegistry.ofDefaults());

    // refreshing to initialize the cache with an empty set
    pipelineDAO.all(true);

    return pipelineDAO;
  }
}
@@ -16,27 +16,22 @@

package com.netflix.spinnaker.front50.controllers

import com.netflix.spectator.api.NoopRegistry
import com.netflix.spinnaker.fiat.shared.FiatPermissionEvaluator
import com.netflix.spinnaker.front50.ServiceAccountsService
import com.netflix.spinnaker.front50.api.model.pipeline.Pipeline
import com.netflix.spinnaker.front50.api.model.pipeline.Trigger
import com.netflix.spinnaker.front50.config.StorageServiceConfigurationProperties
import com.netflix.spinnaker.front50.config.controllers.PipelineControllerConfig
import com.netflix.spinnaker.front50.jackson.Front50ApiModule
import com.netflix.spinnaker.front50.model.DefaultObjectKeyLoader
import com.netflix.spinnaker.front50.model.pipeline.DefaultPipelineDAO
import com.netflix.spinnaker.kork.sql.test.SqlTestUtil

import com.netflix.spinnaker.front50.pipeline.SqlPipelineDAOTestConfiguration
import com.netflix.spinnaker.kork.web.exceptions.ExceptionMessageDecorator
import com.netflix.spinnaker.kork.web.exceptions.GenericExceptionHandlers
import io.github.resilience4j.circuitbreaker.CircuitBreakerRegistry
import org.hamcrest.Matchers
import org.springframework.beans.factory.ObjectProvider
import org.springframework.http.converter.json.MappingJackson2HttpMessageConverter
import org.springframework.web.util.UriComponentsBuilder
import rx.schedulers.Schedulers

import java.nio.charset.StandardCharsets
import java.util.concurrent.Callable
