Updates for tech reviews
mminella committed Feb 11, 2019
1 parent 2fe5f37 commit 1998155
Showing 27 changed files with 1,463 additions and 1,375 deletions.
Chapter09/pom.xml (30 changes: 28 additions & 2 deletions)
@@ -4,7 +4,7 @@
<modelVersion>4.0.0</modelVersion>

<groupId>com.apress.batch</groupId>
<artifactId>chapter9</artifactId>
<artifactId>chapter09</artifactId>
<version>0.0.1-SNAPSHOT</version>
<packaging>jar</packaging>

@@ -14,7 +14,7 @@
<parent>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-parent</artifactId>
<version>2.0.1.RELEASE</version>
<version>2.1.2.RELEASE</version>
<relativePath/> <!-- lookup parent from repository -->
</parent>

@@ -29,6 +29,12 @@
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-batch</artifactId>
<exclusions>
<exclusion>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-logging</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
@@ -59,6 +65,26 @@
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-mail</artifactId>
</dependency>
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-mongodb</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-neo4j</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-gemfire</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.shell</groupId>
<artifactId>spring-shell</artifactId>
</dependency>

<dependency>
<groupId>org.hsqldb</groupId>

Chapter9Application.java
@@ -14,8 +14,8 @@ public class Chapter9Application {
public static void main(String[] args) {
List<String> newArgs = new ArrayList<>(3);
newArgs.add("customerFile=/data/customerWithEmail.csv");
newArgs.add("outputFile=file:/Users/mminella/Documents/IntelliJWorkspace/def-guide-spring-batch/Chapter9/target/formattedCustomers.xml");
newArgs.add("outputFile=file:/tmp/customer.xml");

SpringApplication.run(Chapter9Application.class, newArgs.toArray(new String[newArgs.size()]));
SpringApplication.run(Chapter9Application.class, newArgs.toArray(new String[0]));
}
}
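
For reference only, and not part of this commit: the name=value strings appended in main() become Spring Batch job parameters when Spring Boot launches the job at startup. A minimal sketch of the programmatic equivalent, assuming a JobLauncher and the Job bean are supplied by the caller (the class and method names here are hypothetical):

import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.batch.core.launch.JobLauncher;

public class ManualJobLaunchSketch {

    // Hypothetical helper (not in this repository): launches the job with the same
    // parameters that main() appends to the command-line arguments above.
    public static void launch(JobLauncher jobLauncher, Job job) throws Exception {
        JobParameters jobParameters = new JobParametersBuilder()
                .addString("customerFile", "/data/customerWithEmail.csv")
                .addString("outputFile", "file:/tmp/customer.xml")
                .toJobParameters();

        jobLauncher.run(job, jobParameters);
    }
}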

ClassifierCompositeItemWriterJob.java
@@ -15,145 +15,117 @@
*/
package com.apress.batch.chapter9.configuration;

import java.util.HashMap;
import java.util.Map;
import javax.sql.DataSource;

import com.apress.batch.chapter9.batch.CustomerClassifier;
import com.apress.batch.chapter9.domain.Customer;

import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.item.ItemStreamWriter;
import org.springframework.batch.item.ItemWriter;
import org.springframework.batch.item.database.JdbcBatchItemWriter;
import org.springframework.batch.item.database.builder.JdbcBatchItemWriterBuilder;
import org.springframework.batch.item.file.FlatFileItemReader;
import org.springframework.batch.item.file.builder.FlatFileItemReaderBuilder;
import org.springframework.batch.item.support.ClassifierCompositeItemWriter;
import org.springframework.batch.item.support.builder.ClassifierCompositeItemWriterBuilder;
import org.springframework.batch.item.xml.StaxEventItemWriter;
import org.springframework.batch.item.xml.builder.StaxEventItemWriterBuilder;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.classify.Classifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.Resource;
import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate;
import org.springframework.oxm.xstream.XStreamMarshaller;

/**
* @author Michael Minella
*/
@Configuration
public class ClassifierCompositeItemWriterJob {

private JobBuilderFactory jobBuilderFactory;

private StepBuilderFactory stepBuilderFactory;

public ClassifierCompositeItemWriterJob(JobBuilderFactory jobBuilderFactory,
StepBuilderFactory stepBuilderFactory) {

this.jobBuilderFactory = jobBuilderFactory;
this.stepBuilderFactory = stepBuilderFactory;
}

@Bean
@StepScope
public FlatFileItemReader<Customer> classifierCompositeWriterItemReader(
@Value("#{jobParameters['customerFile']}")Resource inputFile) {

return new FlatFileItemReaderBuilder<Customer>()
.name("classifierCompositeWriterItemReader")
.resource(inputFile)
.delimited()
.names(new String[] {"firstName",
"middleInitial",
"lastName",
"address",
"city",
"state",
"zip",
"email"})
.targetType(Customer.class)
.build();
}

@Bean
@StepScope
public StaxEventItemWriter<Customer> xmlDelegate(
@Value("#{jobParameters['outputFile']}") Resource outputFile) throws Exception {

Map<String, Class> aliases = new HashMap<>();
aliases.put("customer", Customer.class);

XStreamMarshaller marshaller = new XStreamMarshaller();

marshaller.setAliases(aliases);

marshaller.afterPropertiesSet();

return new StaxEventItemWriterBuilder<Customer>()
.name("customerItemWriter")
.resource(outputFile)
.marshaller(marshaller)
.rootTagName("customers")
.build();
}

@Bean
public JdbcBatchItemWriter<Customer> jdbcDelgate(DataSource dataSource) {

return new JdbcBatchItemWriterBuilder<Customer>()
.namedParametersJdbcTemplate(new NamedParameterJdbcTemplate(dataSource))
.sql("INSERT INTO CUSTOMER (first_name, " +
"middle_initial, " +
"last_name, " +
"address, " +
"city, " +
"state, " +
"zip, " +
"email) " +
"VALUES(:firstName, " +
":middleInitial, " +
":lastName, " +
":address, " +
":city, " +
":state, " +
":zip, " +
":email)")
.beanMapped()
.build();
}

@Bean
public ClassifierCompositeItemWriter<Customer> classifierCompositeItemWriter() throws Exception {
Classifier<Customer, ItemWriter<? super Customer>> classifier = new CustomerClassifier(xmlDelegate(null), jdbcDelgate(null));

return new ClassifierCompositeItemWriterBuilder<Customer>()
.classifier(classifier)
.build();
}


@Bean
public Step classifierCompositeWriterStep() throws Exception {
return this.stepBuilderFactory.get("classifierCompositeWriterStep")
.<Customer, Customer>chunk(10)
.reader(classifierCompositeWriterItemReader(null))
.writer(classifierCompositeItemWriter())
.stream(xmlDelegate(null))
.build();
}

@Bean
public Job classifierCompositeWriterJob() throws Exception {
return this.jobBuilderFactory.get("classifierCompositeWriterJob")
.start(classifierCompositeWriterStep())
.build();
}
//
// private JobBuilderFactory jobBuilderFactory;
//
// private StepBuilderFactory stepBuilderFactory;
//
// public ClassifierCompositeItemWriterJob(JobBuilderFactory jobBuilderFactory,
// StepBuilderFactory stepBuilderFactory) {
//
// this.jobBuilderFactory = jobBuilderFactory;
// this.stepBuilderFactory = stepBuilderFactory;
// }
//
// @Bean
// @StepScope
// public FlatFileItemReader<Customer> classifierCompositeWriterItemReader(
// @Value("#{jobParameters['customerFile']}")Resource inputFile) {
//
// return new FlatFileItemReaderBuilder<Customer>()
// .name("classifierCompositeWriterItemReader")
// .resource(inputFile)
// .delimited()
// .names(new String[] {"firstName",
// "middleInitial",
// "lastName",
// "address",
// "city",
// "state",
// "zip",
// "email"})
// .targetType(Customer.class)
// .build();
// }
//
// @Bean
// @StepScope
// public StaxEventItemWriter<Customer> xmlDelegate(
// @Value("#{jobParameters['outputFile']}") Resource outputFile) throws Exception {
//
// Map<String, Class> aliases = new HashMap<>();
// aliases.put("customer", Customer.class);
//
// XStreamMarshaller marshaller = new XStreamMarshaller();
//
// marshaller.setAliases(aliases);
//
// marshaller.afterPropertiesSet();
//
// return new StaxEventItemWriterBuilder<Customer>()
// .name("customerItemWriter")
// .resource(outputFile)
// .marshaller(marshaller)
// .rootTagName("customers")
// .build();
// }
//
// @Bean
// public JdbcBatchItemWriter<Customer> jdbcDelgate(DataSource dataSource) {
//
// return new JdbcBatchItemWriterBuilder<Customer>()
// .namedParametersJdbcTemplate(new NamedParameterJdbcTemplate(dataSource))
// .sql("INSERT INTO CUSTOMER (first_name, " +
// "middle_initial, " +
// "last_name, " +
// "address, " +
// "city, " +
// "state, " +
// "zip, " +
// "email) " +
// "VALUES(:firstName, " +
// ":middleInitial, " +
// ":lastName, " +
// ":address, " +
// ":city, " +
// ":state, " +
// ":zip, " +
// ":email)")
// .beanMapped()
// .build();
// }
//
// @Bean
// public ClassifierCompositeItemWriter<Customer> classifierCompositeItemWriter() throws Exception {
// Classifier<Customer, ItemWriter<? super Customer>> classifier = new CustomerClassifier(xmlDelegate(null), jdbcDelgate(null));
//
// return new ClassifierCompositeItemWriterBuilder<Customer>()
// .classifier(classifier)
// .build();
// }
//
//
// @Bean
// public Step classifierCompositeWriterStep() throws Exception {
// return this.stepBuilderFactory.get("classifierCompositeWriterStep")
// .<Customer, Customer>chunk(10)
// .reader(classifierCompositeWriterItemReader(null))
// .writer(classifierCompositeItemWriter())
// .stream(xmlDelegate(null))
// .build();
// }
//
// @Bean
// public Job classifierCompositeWriterJob() throws Exception {
// return this.jobBuilderFactory.get("classifierCompositeWriterJob")
// .start(classifierCompositeWriterStep())
// .build();
// }
}
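
The CustomerClassifier referenced in this configuration (both in the original code and in the now commented-out version) is not shown in the diff. A minimal sketch of such a classifier, assuming Customer exposes getEmail() and assuming customers with an email address are routed to the XML writer while the rest go to the JDBC writer; the routing rule in the actual repository may differ:

package com.apress.batch.chapter9.batch;

import com.apress.batch.chapter9.domain.Customer;

import org.springframework.batch.item.ItemWriter;
import org.springframework.classify.Classifier;

public class CustomerClassifier implements Classifier<Customer, ItemWriter<? super Customer>> {

    private final ItemWriter<? super Customer> fileItemWriter;
    private final ItemWriter<? super Customer> jdbcItemWriter;

    public CustomerClassifier(ItemWriter<? super Customer> fileItemWriter,
            ItemWriter<? super Customer> jdbcItemWriter) {
        this.fileItemWriter = fileItemWriter;
        this.jdbcItemWriter = jdbcItemWriter;
    }

    @Override
    public ItemWriter<? super Customer> classify(Customer customer) {
        // Route each item to exactly one delegate writer
        // (assumed rule: has email -> XML writer, otherwise -> JDBC writer).
        if (customer.getEmail() != null && !customer.getEmail().isEmpty()) {
            return fileItemWriter;
        }

        return jdbcItemWriter;
    }
}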
(Diffs for the remaining 24 changed files were not loaded in this view.)