@BeforeClass public static void generateTestDataAndQueries() throws Exception { // Table consists of three columns: "emp_id", "emp_name" and "dept_id" empTableLocation = testTempFolder.newFolder().getAbsolutePath(); // Write 100 records for each new file final int empNumRecsPerFile = 100; for(int fileIndex=0; fileIndex<NUM_EMPLOYEES/empNumRecsPerFile; fileIndex++) { File file = new File(empTableLocation + File.separator + fileIndex + ".json"); PrintWriter printWriter = new PrintWriter(file); for (int recordIndex = fileIndex*empNumRecsPerFile; recordIndex < (fileIndex+1)*empNumRecsPerFile; recordIndex++) { String record = String.format("{ \"emp_id\" : %d, \"emp_name\" : \"Employee %d\", \"dept_id\" : %d }", recordIndex, recordIndex, recordIndex % NUM_DEPTS); printWriter.println(record); } printWriter.close(); } // Initialize test queries groupByQuery = String.format("SELECT dept_id, count(*) as numEmployees FROM dfs.`%s` GROUP BY dept_id", empTableLocation); }
@BeforeClass public static void setup() throws Exception { workingDirectory = System.getProperty( "workingDirectory" ); if ( workingDirectory == null ) { String path = SchemaManagerAddTest.class.getResource( "" ).getPath(); int targetPos = path.indexOf( "target" ); workingDirectory = path.substring( 0, targetPos + 6 ); } // Make sure every test class has its own schema directory workingDirectory = new File( workingDirectory, "SchemaManagerAddTest" ).getAbsolutePath(); schemaRepository = new File( workingDirectory, "schema" ); // Cleanup the target directory FileUtils.deleteDirectory( schemaRepository ); SchemaLdifExtractor extractor = new DefaultSchemaLdifExtractor( new File( workingDirectory ) ); extractor.extractOrCopy(); }
@BeforeClass public static void beforeClassSetup() throws InterruptedException { network = (NeatNetwork) NetworkBuilder .create(NetworkType.EVOLUTION) .setInputLayer( LayerBuilder .create() .addNodes(1, Activations.sigmoid)) .addHiddenLayer( LayerBuilder .create() .addNodes(4, Activations.sigmoid)) .setOutputLayer( LayerBuilder .create() .addNodes(1, Activations.sigmoid) ) .withBiasNode() .withSettings( NeatSettings .create() .setRandomSeed(3000)) .build(); }
@BeforeClass public static void createTempFile() throws Exception { File tempFile; while (true) { tempFile = File.createTempFile("drillFSTest", ".txt"); if (tempFile.exists()) { boolean success = tempFile.delete(); if (success) { break; } } } // Write some data PrintWriter printWriter = new PrintWriter(tempFile); for (int i=1; i<=200000; i++) { printWriter.println(String.format("%d, key_%d", i, i)); } printWriter.close(); tempFilePath = tempFile.getPath(); }
@BeforeClass public static void setup() throws Exception { SharesDataRetrievalServiceLocal billingRetrievalService = mock(SharesDataRetrievalServiceLocal.class); when( billingRetrievalService.loadOrganizationHistoryRoles(anyLong(), anyLong())).thenReturn( Arrays.asList(new OrganizationRole( OrganizationRoleType.SUPPLIER))); supplierShareAssembler = spy(new SupplierShareResultAssembler( billingRetrievalService)); createBillingResults(); mockOrganizationData(); mockOrganizationHistoryRoles(); mockGetMarketplaceRevenueSharePercentage(); mockGetOperatorRevenueSharePercentage(); mockGetSellerRevenueSharePercentage(); mockGetProductHistoryData(); mockGetSubscriptionHistoryData(); mockGetBillingResults(); mockFindMarketplaceHistory(); mockGetOrgData(CUSTOMER); mockXmlSearch(); datatypeFactory = DatatypeFactory.newInstance(); }
@BeforeClass public static void setUp() { server.start(); server.addReference(REF_BIO_ID, REF_ID, REFERENCE_NAME, PATH_TO_REFERENCE); //register vcf, no name, with feature index server.addFeatureIndexedFileRegistration(REF_ID, PATH_TO_VCF, null, VCF_ID, VCF_BIO_ID, BiologicalDataItemFormat.VCF, true); // add another without feature index server.addFeatureIndexedFileRegistration(REF_ID, PATH_TO_VCF, null, VCF_ID, VCF_BIO_ID, BiologicalDataItemFormat.VCF, false); //register BAM with name server.addFileRegistration(REF_ID, PATH_TO_BAM, PATH_TO_BAI, BAM_NAME, BAM_ID, BAM_BIO_ID, BiologicalDataItemFormat.BAM); //register BAM without name server.addFileRegistration(REF_ID, PATH_TO_BAM, PATH_TO_BAI, null, BAM_ID, BAM_BIO_ID, BiologicalDataItemFormat.BAM); serverParameters = getDefaultServerOptions(server.getPort()); }
@BeforeClass public static void setUp() throws Exception { WebserviceTestBase.getMailReader().deleteMails(); WebserviceTestBase.getOperator().addCurrency("EUR"); setup = new WebserviceTestSetup(); supplier1 = setup.createSupplier("Supplier1"); is = ServiceFactory.getDefault().getIdentityService( WebserviceTestBase.getPlatformOperatorKey(), WebserviceTestBase.getPlatformOperatorPassword()); VOUserDetails userDetails = is.getCurrentUserDetails(); userDetails.setEMail(WebserviceTestBase.getMailReader() .getMailAddress()); is.updateUser(userDetails); WebserviceTestBase.getMailReader().deleteMails(); }
@BeforeClass public static void setUpFixture() { StringBuilder sb = new StringBuilder(); sb.append("Lorem ipsum dolor sit amet, eam no tale solet patrioque, est ") .append("ne dico veri. Copiosae petentium no eum, has at wisi dicunt causae. Duo ea ") .append("animal eligendi honestatis, dico fastidii officiis sit ne. At oblique ") .append("docendi verterem ius, te vide cibo gloriatur nam. Ad has possit delicata. ") .append("Sit vocibus accusamus an."); loremIpsum = sb.toString(); sb = new StringBuilder(); for (int i = 0; i < 10000; i++) { sb.append(loremIpsum); } repeatedLoremIpsum = sb.toString(); }
/** * Initialize the database with the 'MDA' JIRA project. */ @BeforeClass public static void initializeJiraDataBase() throws SQLException { datasource = new SimpleDriverDataSource(new JDBCDriver(), "jdbc:hsqldb:mem:dataSource", null, null); final Connection connection = datasource.getConnection(); final JdbcTemplate jdbcTemplate = new JdbcTemplate(datasource); try { ScriptUtils.executeSqlScript(connection, new EncodedResource(new ClassPathResource("sql/base-1/jira-create.sql"), StandardCharsets.UTF_8)); ScriptUtils.executeSqlScript(connection, new EncodedResource(new ClassPathResource("sql/base-1/jira.sql"), StandardCharsets.UTF_8)); jdbcTemplate.queryForList("SELECT * FROM pluginversion WHERE ID = 10075"); } finally { connection.close(); } }
@BeforeClass public static void setupConfigsAndUtils() throws Exception { PRODUCER_CONFIG.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers()); PRODUCER_CONFIG.put(ProducerConfig.ACKS_CONFIG, "all"); PRODUCER_CONFIG.put(ProducerConfig.RETRIES_CONFIG, 0); PRODUCER_CONFIG.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, LongSerializer.class); PRODUCER_CONFIG.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class); RESULT_CONSUMER_CONFIG.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers()); RESULT_CONSUMER_CONFIG.put(ConsumerConfig.GROUP_ID_CONFIG, APP_ID + "-result-consumer"); RESULT_CONSUMER_CONFIG.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); RESULT_CONSUMER_CONFIG.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class); RESULT_CONSUMER_CONFIG.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class); STREAMS_CONFIG.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers()); STREAMS_CONFIG.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); STREAMS_CONFIG.put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory().getPath()); STREAMS_CONFIG.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.Long().getClass()); STREAMS_CONFIG.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass()); STREAMS_CONFIG.put(StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG, 0); STREAMS_CONFIG.put(IntegrationTestUtils.INTERNAL_LEAVE_GROUP_ON_CLOSE, true); STREAMS_CONFIG.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, 100); }
@BeforeClass public static void init() throws NoSuchFieldException, IllegalAccessException, InterruptedException, RemotingTimeoutException, MQClientException, RemotingSendRequestException, RemotingConnectException, MQBrokerException, UnsupportedEncodingException { mQClientAPIImpl = mock(MQClientAPIImpl.class); defaultMQAdminExt = new DefaultMQAdminExt(); defaultMQAdminExtImpl = new DefaultMQAdminExtImpl(defaultMQAdminExt, 1000); Field field = DefaultMQAdminExtImpl.class.getDeclaredField("mqClientInstance"); field.setAccessible(true); field.set(defaultMQAdminExtImpl, mqClientInstance); field = MQClientInstance.class.getDeclaredField("mQClientAPIImpl"); field.setAccessible(true); field.set(mqClientInstance, mQClientAPIImpl); field = DefaultMQAdminExt.class.getDeclaredField("defaultMQAdminExtImpl"); field.setAccessible(true); field.set(defaultMQAdminExt, defaultMQAdminExtImpl); Properties properties = new Properties(); properties.setProperty("maxMessageSize", "5000000"); properties.setProperty("flushDelayOffsetInterval", "15000"); properties.setProperty("serverSocketRcvBufSize", "655350"); when(mQClientAPIImpl.getBrokerConfig(anyString(), anyLong())).thenReturn(properties); }
@BeforeClass public static void beforeClass() { String queueName = "DEMO.CONTRACT.MOCK"; System.setProperty("karate.env", "contract"); File file = FileUtils.getFileRelativeTo(PaymentServiceContractUsingMockTest.class, "payment-service-mock.feature"); server = FeatureServer.start(file, 0, false, Collections.singletonMap("queueName", queueName)); String paymentServiceUrl = "http://localhost:" + server.getPort(); System.setProperty("payment.service.url", paymentServiceUrl); System.setProperty("shipping.queue.name", queueName); }
@BeforeClass public static void checkAvailability() { try { new Hmac(); } catch (WolfCryptException e) { if (e.getError() == WolfCryptError.NOT_COMPILED_IN) System.out.println("Hmac test skipped: " + e.getError()); Assume.assumeNoException(e); } }
@BeforeClass public static void setupDefaultTestCluster() throws Exception { // Create a test implementation of UserService // and inject it in SabotNode. BINDER_RULE.bind(UserService.class, new UserServiceTestImpl()); BaseTestQuery.setupDefaultTestCluster(); }
@BeforeClass public static void init() { Calendar c = Calendar.getInstance(); c.set(1979, Calendar.MARCH, 9, 11, 30); c.set(Calendar.SECOND, 0); c.set(Calendar.MILLISECOND, 0); DATE_VALUE = c.getTime(); builder = GsonConfiguration.builder(); }
@BeforeClass public static void setUp() throws Exception { DynamoDBMapperIntegrationTestBase.setUp(); // Insert the data for (Map<String, AttributeValue> attr : attrs) { dynamo.putItem(PutItemRequest.builder().tableName(TABLE_NAME).item(attr).build()); } }
@BeforeClass public static void setUpBeforeClass() throws Exception { TestReplicationBase.setUpBeforeClass(); connection1 = ConnectionFactory.createConnection(conf1); connection2 = ConnectionFactory.createConnection(conf2); admin1 = connection1.getAdmin(); admin2 = connection2.getAdmin(); adminExt = new ReplicationAdmin(conf1); }
@BeforeClass public static void setUp() throws Exception { LanguageInterface language = new French(); PlayerInterface player = new HumanPlayer("Christopher Anciaux"); PlayerInterface player2 = new HumanPlayer("Joueur 2"); player.addLetters(Arrays.asList(LetterToStringTransformer.reverseTransform("A", language), LetterToStringTransformer.reverseTransform("B", language), LetterToStringTransformer.reverseTransform("C", language), LetterToStringTransformer.reverseTransform("J", language))); player2.addLetters(Arrays.asList(LetterToStringTransformer.reverseTransform("D", language), LetterToStringTransformer.reverseTransform("B", language), LetterToStringTransformer.reverseTransform("J", language), LetterToStringTransformer.reverseTransform("C", language))); GameSaverTest.gameSave = new GameSave(language, new Board(), Arrays.asList(player, player2), player, new Bag(language.getBagLettersDistribution()), (short) 0); }
@BeforeClass public static void init(){ mockedWebRequest = Mockito.mock(WebRequest.class); Mockito.when(mockedWebRequest.getParameter("id")).thenReturn("1"); Mockito.when(mockedWebRequest.getParameter("type")).thenReturn("Samplerunner"); Mockito.when(mockedWebRequest.getParameter("manufacturer")).thenReturn("Sample Co"); Mockito.when(mockedWebRequest.getParameter("date_of_manufacture")).thenReturn("1999-01-01"); Mockito.when(mockedWebRequest.getParameter("date_of_acquire")).thenReturn("2017-01-01"); Mockito.when(mockedWebRequest.getParameter("date_of_supervision")).thenReturn("2017-05-05"); Mockito.when(mockedWebRequest.getParameter("plate_number")).thenReturn("SAM-LPE"); Mockito.when(mockedWebRequest.getParameter("chassis_number")).thenReturn("SSSAMPLE"); Mockito.when(mockedWebRequest.getParameter("weight")).thenReturn("1"); Mockito.when(mockedWebRequest.getParameter("max_load_weight")).thenReturn("100"); }
@BeforeClass public static void setUp() { server.start(); server.addReference(REF_BIO_ID, REF_ID, REFERENCE_NAME, PATH_TO_REFERENCE); //simple case - empty dataset BiologicalDataItem reference = TestDataProvider.getBioItem(REF_ID, REF_BIO_ID, BiologicalDataItemFormat.REFERENCE, PATH_TO_REFERENCE, REFERENCE_NAME); server.addDatasetRegistration(DATASET_NAME, Collections.singletonList(reference), DATASET_ID); //dataset + parent ID server.addDataset(DATASET_ID, DATASET_NAME, Collections.singletonList(reference)); server.addDatasetRegistrationWithParent(DATASET_WITH_PARENT_NAME, Collections.singletonList(reference), DATASET_WIH_PARENT_ID, DATASET_ID); //dataset with registered file BiologicalDataItem vcf = TestDataProvider.getBioItem(VCF_ID, VCF_BIO_ID, BiologicalDataItemFormat.VCF, PATH_TO_VCF, VCF_NAME); server.addFile(VCF_BIO_ID, VCF_ID, VCF_NAME, PATH_TO_VCF, BiologicalDataItemFormat.VCF); server.addDatasetRegistration(DATASET_WITH_VCF_NAME, Arrays.asList(reference, vcf), DATASET_WITH_VCF_ID); //dataset with file registration BiologicalDataItem bam = TestDataProvider.getBioItem(BAM_ID, BAM_BIO_ID, BiologicalDataItemFormat.BAM, PATH_TO_BAM, BAM_NAME); server.addFileRegistration(REF_ID, PATH_TO_BAM, PATH_TO_BAI, null, BAM_ID, BAM_BIO_ID, BiologicalDataItemFormat.BAM); server.addDatasetRegistration(DATASET_WITH_BAM_NAME, Arrays.asList(reference, vcf, bam), DATASET_WITH_BAM_ID); serverParameters = getDefaultServerOptions(server.getPort()); }
@BeforeClass public static void init() { // Resolve required services CHECK_OUT_CHECK_IN_SERVICE = APP_CONTEXT_INIT.getApplicationContext().getBean("CheckOutCheckInService", CheckOutCheckInService.class); CONTENT_SERVICE = APP_CONTEXT_INIT.getApplicationContext().getBean("contentService", ContentService.class); DOWNLOAD_SERVICE = APP_CONTEXT_INIT.getApplicationContext().getBean("DownloadService", DownloadService.class); NODE_SERVICE = APP_CONTEXT_INIT.getApplicationContext().getBean("NodeService", NodeService.class); PERMISSION_SERVICE = APP_CONTEXT_INIT.getApplicationContext().getBean("PermissionService", PermissionService.class); TRANSACTION_HELPER = APP_CONTEXT_INIT.getApplicationContext().getBean("retryingTransactionHelper", RetryingTransactionHelper.class); INTEGRITY_CHECKER = APP_CONTEXT_INIT.getApplicationContext().getBean("integrityChecker", IntegrityChecker.class); INTEGRITY_CHECKER.setEnabled(true); INTEGRITY_CHECKER.setFailOnViolation(true); INTEGRITY_CHECKER.setTraceOn(true); }
@BeforeClass public static void init() throws Exception { GenericTestUtils.assumeInNativeProfile(); Configuration conf = new Configuration(); conf.set( CommonConfigurationKeysPublic.HADOOP_SECURITY_CRYPTO_CODEC_CLASSES_AES_CTR_NOPADDING_KEY, OpensslAesCtrCryptoCodec.class.getName()); codec = CryptoCodec.getInstance(conf); assertNotNull("Unable to instantiate codec " + OpensslAesCtrCryptoCodec.class.getName() + ", is the required " + "version of OpenSSL installed?", codec); assertEquals(OpensslAesCtrCryptoCodec.class.getCanonicalName(), codec.getClass().getCanonicalName()); }
@BeforeClass public static void beforeClass() throws Exception { UTIL.setJobWithoutMRCluster(); UTIL.startMiniCluster(); HTable table = UTIL.createMultiRegionTable(MULTI_REGION_TABLE_NAME, new byte[][] { INPUT_FAMILY, OUTPUT_FAMILY }); UTIL.loadTable(table, INPUT_FAMILY, false); }
@BeforeClass public static void setUpClass() { final Properties props = new Properties(); props.put("log-level", "none"); props.put("mcast-port", "0"); cache = new MCacheFactory(props).create(); mCache = MCacheFactory.getAnyInstance(); }
@BeforeClass public static void beforeClass() { File file = FileUtils.getFileRelativeTo(MockServerTest.class, "_mock.feature"); server = FeatureServer.start(file, 0, false, null); int port = server.getPort(); System.setProperty("karate.server.port", port + ""); }
@BeforeClass public static void beforeAll() throws Exception { ApplicationContext context = new AnnotationConfigApplicationContext(Application.class); jetty = context.getBean(Server.class); jetty.start(); //jetty.join(); }
/** * Deletes Categories and Types from the target CTP project, then populates it with category test data. */ @BeforeClass public static void setup() { deleteAllCategories(CTP_TARGET_CLIENT); deleteTypes(CTP_TARGET_CLIENT); final CategoryDraft oldCategoryDraft = CategoryDraftBuilder .of(LocalizedString.of(Locale.ENGLISH, "classic furniture"), LocalizedString.of(Locale.ENGLISH, "classic-furniture", Locale.GERMAN, "klassische-moebel")) .orderHint("oldOrderHint") .build(); oldCategory = CTP_TARGET_CLIENT.execute(CategoryCreateCommand.of(oldCategoryDraft)) .toCompletableFuture() .join(); }
@BeforeClass public static void init() { mocks.add(mock(HystrixMetricsInitializationListener.class)); mocks.add(mock(HystrixMetricsInitializationListener.class)); mocks.add(mock(HystrixMetricsInitializationListener.class)); mocks.forEach(l -> notifier.addListener(l)); }
@BeforeClass public static void setup() { longIdManagerConfig.addProperty(TinkerGraph.GREMLIN_TINKERGRAPH_EDGE_ID_MANAGER, TinkerGraph.DefaultIdManager.LONG.name()); longIdManagerConfig.addProperty(TinkerGraph.GREMLIN_TINKERGRAPH_VERTEX_ID_MANAGER, TinkerGraph.DefaultIdManager.LONG.name()); longIdManagerConfig.addProperty(TinkerGraph.GREMLIN_TINKERGRAPH_VERTEX_PROPERTY_ID_MANAGER, TinkerGraph.DefaultIdManager.LONG.name()); integerIdManagerConfig.addProperty(TinkerGraph.GREMLIN_TINKERGRAPH_EDGE_ID_MANAGER, TinkerGraph.DefaultIdManager.INTEGER.name()); integerIdManagerConfig.addProperty(TinkerGraph.GREMLIN_TINKERGRAPH_VERTEX_ID_MANAGER, TinkerGraph.DefaultIdManager.INTEGER.name()); integerIdManagerConfig.addProperty(TinkerGraph.GREMLIN_TINKERGRAPH_VERTEX_PROPERTY_ID_MANAGER, TinkerGraph.DefaultIdManager.INTEGER.name()); }
@BeforeClass public static void setupFixture() throws IOException { createBucket(BUCKET); file = new RandomTempFile(10_000); s3.putObject(PutObjectRequest.builder() .bucket(BUCKET) .key(KEY) .build(), file.toPath()); }
@BeforeClass public static void init() { // 创建SessionFactory sf = new MetadataSources(new StandardServiceRegistryBuilder().configure( "oracle.hibernate.cfg.xml").build()).buildMetadata().buildSessionFactory(); }
@BeforeClass public static void setup() throws Exception { // start minicluster conf = new YarnConfiguration(); conf.setLong( YarnConfiguration.RM_AMRM_TOKEN_MASTER_KEY_ROLLING_INTERVAL_SECS, rolling_interval_sec); conf.setLong(YarnConfiguration.RM_AM_EXPIRY_INTERVAL_MS, am_expire_ms); conf.setInt(YarnConfiguration.RM_NM_HEARTBEAT_INTERVAL_MS, 100); conf.setLong(YarnConfiguration.NM_LOG_RETAIN_SECONDS, 1); yarnCluster = new MiniYARNCluster(TestAMRMClient.class.getName(), nodeCount, 1, 1); yarnCluster.init(conf); yarnCluster.start(); // start rm client yarnClient = YarnClient.createYarnClient(); yarnClient.init(conf); yarnClient.start(); // get node info nodeReports = yarnClient.getNodeReports(NodeState.RUNNING); priority = Priority.newInstance(1); priority2 = Priority.newInstance(2); capability = Resource.newInstance(1024, 1, 1); node = nodeReports.get(0).getNodeId().getHost(); rack = nodeReports.get(0).getRackName(); nodes = new String[]{ node }; racks = new String[]{ rack }; }
@BeforeClass public static void beforeClass() throws IOException, InterruptedException { if (!RedissonRuntimeEnvironment.isTravis) { RedisRunner.startDefaultRedisServerInstance(); defaultRedisson = createInstance(); } }
@BeforeClass public static void setUpOnce() throws Exception { WebserviceTestBase.getMailReader().deleteMails(); WebserviceTestBase.getOperator().addCurrency("EUR"); setup = new WebserviceTestSetup(); setup.createSupplier("Supplier"); }
@BeforeClass public static void setup() throws Exception { testNoResult("alter session set `store.parquet.enable_dictionary_encoding_binary_type`=true"); testNoResult("CREATE TABLE dfs_test.globaldictionary AS SELECT * FROM cp.`globaldictionary.json`"); testNoResult("CREATE TABLE dfs_test.places AS SELECT * FROM cp.`places.json`"); fs = FileSystem.getLocal(new Configuration()); tableDirPath1 = new Path(getDfsTestTmpSchemaLocation() + "/globaldictionary"); GlobalDictionaryBuilder.createGlobalDictionaries(fs, tableDirPath1, getAllocator()); tableDirPath2 = new Path(getDfsTestTmpSchemaLocation() + "/places"); GlobalDictionaryBuilder.createGlobalDictionaries(fs, tableDirPath2, getAllocator()); }
@BeforeClass public static void closeWelcomePage() { try { SWTBotPreferences.TIMEOUT = 6000; } catch (Exception e) { e.printStackTrace(); } }
@BeforeClass public static void setup() { RestClient restClient = new RestClient.Builder() .withBaseUrl("http://localhost:3000") .withSerializerAdapter(new AzureJacksonAdapter()) .withResponseBuilderFactory(new AzureResponseBuilder.Factory()) .build(); client = new AutoRestPagingTestServiceImpl(restClient); }
@BeforeClass public static void init() { // 创建SessionFactory sf = new MetadataSources(new StandardServiceRegistryBuilder().configure( "hibernate.cfg.xml").build()).buildMetadata().buildSessionFactory(); }
@BeforeClass public static void setupConsumer() { System.setProperty(BenchmarkOptionsSystemProperties.CUSTOMKEY_PROPERTY, "" + size); //if(h2consumer!=null) h2consumer = new H2Consumer(dbFile); }
/** * Sets up Spark and loads the SNOMED mappings for testing. */ @BeforeClass public static void setUp() { spark = SparkSession.builder() .master("local[2]") .appName("SnomedTest") .getOrCreate(); snomedValues = Snomed.readRelationshipFile(spark, "src/test/resources/SNOMED_RELATIONSHIP_SAMPLE.TXT"); snomedValues.cache(); }