Fixed javadoc style (wgzhao#117)
wgzhao authored Feb 10, 2021
1 parent f54a09c commit 7752e0a
Showing 41 changed files with 341 additions and 257 deletions.
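
The diff below repeats a small set of style fixes: comments that are not real API documentation lose the Javadoc form (/** ... */ becomes /* ... */ or //), stray HTML such as <p/> and <br> is dropped, missing @param/@return tags are added, and redundant final modifiers on parameters are removed. The following is only an illustration of that target style, using a made-up class rather than code from this repository:

    // Illustration only (not part of the commit): the style the diff converges on.
    public class JavadocStyleExample
    {
        /**
         * Splits the given work into roughly equal parts.
         *
         * @param adviceNumber suggested number of parts
         * @return number of parts actually produced
         */
        public int split(int adviceNumber)   // was: split(final int adviceNumber)
        {
            /* implementation note, not Javadoc: never return fewer than one part */
            return Math.max(1, adviceNumber);
        }
    }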
Column.java
@@ -6,20 +6,15 @@
 import java.math.BigInteger;
 import java.util.Date;

-/**
- * Created by jingxing on 14-8-24.
- * <p/>
- */
 public abstract class Column
 {

     private Type type;

     private Object rawData;

     private int byteSize;

-    public Column(final Object object, final Type type, int byteSize)
+    public Column(Object object, Type type, int byteSize)
     {
         this.rawData = object;
         this.type = type;
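
The hunk above shows the three fields every column carries. A standalone sketch (it does not extend the project's Column class, and the names beyond those visible above are illustrative) of what they typically hold:

    // Standalone sketch: rawData is the boxed value (possibly null), type selects which
    // conversions make sense, and byteSize is used for accounting (0 when rawData is null,
    // as the constructors in the following hunks show).
    public class ColumnSketch
    {
        enum Type { STRING, LONG, DOUBLE, DATE }

        private final Object rawData;
        private final Type type;
        private final int byteSize;

        ColumnSketch(Object rawData, Type type, int byteSize)
        {
            this.rawData = rawData;
            this.type = type;
            this.byteSize = byteSize;
        }

        static ColumnSketch ofString(String value)
        {
            return new ColumnSketch(value, Type.STRING, value == null ? 0 : value.length());
        }

        @Override
        public String toString()
        {
            return type + "(" + rawData + ", " + byteSize + " bytes)";
        }
    }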
DateColumn.java
@@ -28,41 +28,41 @@ public DateColumn()
      * Builds a DateColumn whose value is stamp (a Unix timestamp), using the Date subtype DATETIME.
      * The date is actually stored as a long of milliseconds to save space.
      */
-    public DateColumn(final Long stamp)
+    public DateColumn(Long stamp)
     {
         super(stamp, Column.Type.DATE, (null == stamp ? 0 : 8));
     }

-    /**
+    /*
      * Builds a DateColumn whose value is date (java.util.Date), using the Date subtype DATETIME.
      */
-    public DateColumn(final Date date)
+    public DateColumn(Date date)
     {
         this(date == null ? null : date.getTime());
     }

-    /**
+    /*
      * Builds a DateColumn whose value is date (java.sql.Date), using the Date subtype DATE (date only, no time).
      */
-    public DateColumn(final java.sql.Date date)
+    public DateColumn(java.sql.Date date)
     {
         this(date == null ? null : date.getTime());
         this.setSubType(DateType.DATE);
     }

-    /**
+    /*
      * Builds a DateColumn whose value is time (java.sql.Time), using the Date subtype TIME (time only, no date).
      */
-    public DateColumn(final java.sql.Time time)
+    public DateColumn(java.sql.Time time)
     {
         this(time == null ? null : time.getTime());
         this.setSubType(DateType.TIME);
     }

-    /**
+    /*
      * Builds a DateColumn whose value is ts (java.sql.Timestamp), using the Date subtype DATETIME.
      */
-    public DateColumn(final java.sql.Timestamp ts)
+    public DateColumn(java.sql.Timestamp ts)
     {
         this(ts == null ? null : ts.getTime());
         this.setSubType(DateType.DATETIME);
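
Every constructor above normalizes its input to epoch milliseconds and records a subtype (DATE, TIME or DATETIME). A minimal standalone sketch of that pattern, not the project's DateColumn itself:

    import java.util.Date;

    // Standalone sketch: normalize to epoch millis, remember which facet of the value matters.
    public class DateColumnSketch
    {
        public enum DateType { DATE, TIME, DATETIME }

        private final Long millis;                       // null input stays null
        private DateType subType = DateType.DATETIME;    // default, as in the Long/Timestamp constructors

        public DateColumnSketch(Long stamp)
        {
            this.millis = stamp;
        }

        public DateColumnSketch(java.sql.Date date)
        {
            this(date == null ? null : date.getTime());
            this.subType = DateType.DATE;
        }

        public DateColumnSketch(java.sql.Time time)
        {
            this(time == null ? null : time.getTime());
            this.subType = DateType.TIME;
        }

        public Date asDate()
        {
            return millis == null ? null : new Date(millis);
        }

        public DateType getSubType()
        {
            return subType;
        }
    }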
DoubleColumn.java
@@ -11,7 +11,7 @@ public class DoubleColumn
         extends Column
 {

-    public DoubleColumn(final String data)
+    public DoubleColumn(String data)
     {
         this(data, null == data ? 0 : data.length());
         this.validate(data);
@@ -30,7 +30,7 @@ public DoubleColumn(Integer data)
     /**
      * Double cannot represent decimal values exactly; saving data as a Double is not recommended. Prefer passing a String to the constructor.
      */
-    public DoubleColumn(final Double data)
+    public DoubleColumn(Double data)
     {
         this(data == null ? null
                 : new BigDecimal(String.valueOf(data)).toPlainString());
@@ -39,18 +39,18 @@ public DoubleColumn(final Double data)
     /**
      * Float cannot represent decimal values exactly; saving data as a Float is not recommended. Prefer passing a String to the constructor.
      */
-    public DoubleColumn(final Float data)
+    public DoubleColumn(Float data)
     {
         this(data == null ? null
                 : new BigDecimal(String.valueOf(data)).toPlainString());
     }

-    public DoubleColumn(final BigDecimal data)
+    public DoubleColumn(BigDecimal data)
     {
         this(null == data ? null : data.toPlainString());
     }

-    public DoubleColumn(final BigInteger data)
+    public DoubleColumn(BigInteger data)
     {
         this(null == data ? null : data.toString());
     }
@@ -60,7 +60,7 @@ public DoubleColumn()
         this((String) null);
     }

-    private DoubleColumn(final String data, int byteSize)
+    private DoubleColumn(String data, int byteSize)
     {
         super(data, Column.Type.DOUBLE, byteSize);
     }
@@ -92,8 +92,8 @@ public Double asDouble()

         String string = (String) this.getRawData();

-        boolean isDoubleSpecific = string.equals("NaN")
-                || string.equals("-Infinity") || string.equals("+Infinity");
+        boolean isDoubleSpecific = "NaN".equals(string)
+                || "-Infinity".equals(string) || "+Infinity".equals(string);
         if (isDoubleSpecific) {
             return Double.valueOf(string);
         }
@@ -157,14 +157,14 @@ public byte[] asBytes()
                 CommonErrorCode.CONVERT_NOT_SUPPORT, "Double type cannot be converted to Bytes type.");
     }

-    private void validate(final String data)
+    private void validate(String data)
     {
         if (null == data) {
             return;
         }

-        if (data.equalsIgnoreCase("NaN") || data.equalsIgnoreCase("-Infinity")
-                || data.equalsIgnoreCase("Infinity")) {
+        if ("NaN".equalsIgnoreCase(data) || "-Infinity".equalsIgnoreCase(data)
+                || "Infinity".equalsIgnoreCase(data)) {
             return;
         }
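
The Javadoc above warns against passing Double or Float values because they have already lost precision before the column sees them, which is why those constructors route through BigDecimal.toPlainString(). A small standalone demonstration of the effect:

    import java.math.BigDecimal;

    // Standalone sketch: why the DoubleColumn javadoc recommends String/BigDecimal input.
    public class DoublePrecisionDemo
    {
        public static void main(String[] args)
        {
            double d = 0.1 + 0.2;
            // prints 0.30000000000000004: the binary double cannot represent 0.3 exactly
            System.out.println(d);

            // the constructor path above: String.valueOf(double) -> BigDecimal -> toPlainString()
            System.out.println(new BigDecimal(String.valueOf(d)).toPlainString());

            // passing the value as a String keeps it exact
            System.out.println(new BigDecimal("0.3").toPlainString());
        }
    }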
LongColumn.java
@@ -14,11 +14,9 @@ public class LongColumn

     /**
      * Converts an integral string representation into a LongColumn; Java scientific notation is supported.
-     * <p>
-     * NOTE: <br>
      * If data is the string representation of a floating-point value, precision will be lost; use DoubleColumn for floating-point strings.
      */
-    public LongColumn(final String data)
+    public LongColumn(String data)
     {
         super(null, Column.Type.LONG, 0);
         if (null == data) {
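
The Javadoc above says integral strings in scientific notation are accepted while floating-point strings lose their fractional part. A standalone sketch of that behaviour (this is an illustration of the documented contract, not the project's actual parsing code):

    import java.math.BigDecimal;
    import java.math.BigInteger;

    // Standalone sketch: BigDecimal understands scientific notation; toBigInteger()
    // silently drops any fractional part, which is the precision loss the comment warns about.
    public class LongParsingDemo
    {
        static BigInteger parseIntegralString(String data)
        {
            return new BigDecimal(data).toBigInteger();
        }

        public static void main(String[] args)
        {
            System.out.println(parseIntegralString("1.5E3"));   // 1500, exact
            System.out.println(parseIntegralString("3.14"));    // 3, fractional part lost
        }
    }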
JobPluginCollector.java
@@ -10,13 +10,13 @@ public interface JobPluginCollector
         extends PluginCollector
 {

-    /**
-     * Gets the custom information collected from the Tasks.
-     */
+    // Gets the custom information collected from the Tasks.

     Map<String, List<String>> getMessage();

-    /**
-     * Gets the custom information collected from the Tasks.
-     */
+    // Gets the custom information collected from the Tasks.

     List<String> getMessage(String key);
}
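
A brief usage sketch of the two accessors above. It is standalone: the local JobMessages interface only mirrors the two getMessage signatures shown in the diff, since the real interface's package path is not part of this diff.

    import java.util.List;
    import java.util.Map;

    // Standalone sketch: how a Job plugin might consume what its tasks collected.
    public class MessageReport
    {
        interface JobMessages
        {
            Map<String, List<String>> getMessage();

            List<String> getMessage(String key);
        }

        static void print(JobMessages collector)
        {
            // every key maps to a List because several tasks may report under the same key
            for (Map.Entry<String, List<String>> e : collector.getMessage().entrySet()) {
                System.out.println(e.getKey() + " -> " + e.getValue());
            }
        }
    }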
TaskPluginCollector.java
@@ -19,17 +19,17 @@ public abstract class TaskPluginCollector
      * @param t the exception
      * @param errorMessage the error message
      */
-    public abstract void collectDirtyRecord(final Record dirtyRecord,
-            final Throwable t, final String errorMessage);
+    public abstract void collectDirtyRecord(Record dirtyRecord,
+            Throwable t, String errorMessage);

     /**
      * Collects a dirty record.
      *
      * @param dirtyRecord the dirty record
      * @param errorMessage the error message
      */
-    public void collectDirtyRecord(final Record dirtyRecord,
-            final String errorMessage)
+    public void collectDirtyRecord(Record dirtyRecord,
+            String errorMessage)
     {
         this.collectDirtyRecord(dirtyRecord, null, errorMessage);
     }
@@ -40,14 +40,16 @@ public void collectDirtyRecord(final Record dirtyRecord,
      * @param dirtyRecord the dirty record
      * @param t the exception
      */
-    public void collectDirtyRecord(final Record dirtyRecord, final Throwable t)
+    public void collectDirtyRecord(Record dirtyRecord, Throwable t)
     {
         this.collectDirtyRecord(dirtyRecord, t, "");
     }

     /**
      * Collects custom information; the Job plugin can read it via getMessage. <br >
      * If several values arrive under the same key, a List keeps every value for that key internally. <br >
+     * @param key message key
+     * @param value message content
      */
-    public abstract void collectMessage(final String key, final String value);
+    public abstract void collectMessage(String key, String value);
}
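
The comments above describe the intended calling pattern: a task reports a bad record instead of failing the whole job, and leaves messages for the Job side to read. A standalone sketch of that pattern, using local stand-ins for the Record and collector types rather than the project's classes:

    // Standalone sketch of the dirty-record / message flow described above.
    public class DirtyRecordHandling
    {
        interface Record {}

        interface Collector
        {
            void collectDirtyRecord(Record dirtyRecord, Throwable t, String errorMessage);

            void collectMessage(String key, String value);
        }

        static void writeOne(Record record, Collector collector)
        {
            try {
                convertAndWrite(record);
            }
            catch (Exception e) {
                // the record is skipped, not the whole task
                collector.collectDirtyRecord(record, e, "failed to convert record");
                collector.collectMessage("dirtyCount", "1");
            }
        }

        static void convertAndWrite(Record record)
        {
            // placeholder for real conversion and write logic
        }
    }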
common/src/main/java/com/wgzhao/datax/common/spi/Reader.java (9 additions, 11 deletions)
@@ -25,20 +25,18 @@ public abstract static class Job
         /**
          * Splits the job into tasks.
          *
-         * @param adviceNumber Note in particular that adviceNumber is the number of tasks the framework suggests the plugin split into;
-         *                     plugin developers should preferably produce >= adviceNumber tasks. <br>
-         *                     <br>
+         * @param adviceNumber Note in particular that adviceNumber is the number of tasks the framework suggests the plugin split into;
+         *                     plugin developers should preferably produce more than adviceNumber tasks.
          *                     The suggestion exists to give the user the best result: for example, the framework may calculate that the user's data store can support 100 concurrent connections
-         *                     while the user wants 100 concurrent tasks; a plugin that splits by the rule above and reaches >= 100 connections
-         *                     lets DataX start 100 Channels at once, giving the user the best throughput. <br>
+         *                     while the user wants 100 concurrent tasks; a plugin that splits by the rule above and exceeds 100 connections
+         *                     lets DataX start 100 Channels at once, giving the user the best throughput.
          *                     For example, when the user synchronizes a single MySQL table but expects a throughput of 10 concurrent tasks, the plugin developer should split that table, say by primary-key range,
-         *                     and if the final number of tasks is >= 10, we can give the user the maximum throughput. <br>
-         *                     <br>
-         *                     Of course, this is only a suggested value; a Reader plugin may split by its own rules, but we recommend following the framework's suggestion. <br>
-         *                     <br>
+         *                     and if the final number of tasks is greater than or equal to 10, we can give the user the maximum throughput.
+         *                     Of course, this is only a suggested value; a Reader plugin may split by its own rules, but we recommend following the framework's suggestion.
          *                     For ODPS writing to OTS, pre-sorting and pre-splitting constraints may force splitting by partition information only, with no finer granularity;
-         *                     such cases can only be split by the physical layout of the source. <br>
-         *                     <br>
+         *                     such cases can only be split by the physical layout of the source.
          * @return list of configuration
          *
          */
         public abstract List<Configuration> split(int adviceNumber);
     }
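
A standalone sketch of the splitting rule this Javadoc describes: try to produce at least adviceNumber slices, here by cutting a primary-key range into even chunks. The real method returns List<Configuration>; plain long[] ranges stand in for Configuration objects here.

    import java.util.ArrayList;
    import java.util.List;

    // Standalone sketch: split [minId, maxId] into roughly adviceNumber slices.
    public class SplitByPrimaryKey
    {
        static List<long[]> split(long minId, long maxId, int adviceNumber)
        {
            List<long[]> slices = new ArrayList<>();
            long total = maxId - minId + 1;
            // never fewer slices than advised, unless there are fewer rows than slices
            long sliceCount = Math.min(total, Math.max(1, adviceNumber));
            long step = (total + sliceCount - 1) / sliceCount;   // ceiling division
            for (long lo = minId; lo <= maxId; lo += step) {
                long hi = Math.min(maxId, lo + step - 1);
                slices.add(new long[] {lo, hi});
            }
            return slices;
        }

        public static void main(String[] args)
        {
            for (long[] s : split(1, 1_000, 10)) {
                System.out.println(s[0] + " .. " + s[1]);
            }
        }
    }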
common/src/main/java/com/wgzhao/datax/common/spi/Writer.java (2 additions, 0 deletions)
@@ -24,6 +24,8 @@ public abstract static class Job
          * Splits the job into tasks. <br>
          *
          * @param mandatoryNumber To keep the Reader and Writer task counts equal, the Writer plugin must split into exactly the number of tasks the source side was split into; otherwise the framework reports an error!
+         *
+         * @return list of configuration
          */
         public abstract List<Configuration> split(int mandatoryNumber);
     }
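
A standalone sketch of the contract above: the Writer must return exactly mandatoryNumber task configurations so Reader and Writer tasks pair one to one. A String stands in for the project's Configuration type, and cloning the job config per task is one common approach, not necessarily what every writer in this repository does.

    import java.util.ArrayList;
    import java.util.List;

    // Standalone sketch: produce exactly mandatoryNumber task configurations.
    public class WriterSplitSketch
    {
        static List<String> split(String jobConfiguration, int mandatoryNumber)
        {
            List<String> taskConfigurations = new ArrayList<>(mandatoryNumber);
            for (int i = 0; i < mandatoryNumber; i++) {
                taskConfigurations.add(jobConfiguration);   // one identical config per task
            }
            return taskConfigurations;
        }
    }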
PerfTrace.java
@@ -81,7 +81,7 @@ public static synchronized PerfTrace getInstance(boolean isJob, long jobId, int
         return instance;
     }

-    /**
+    /*
      * Because there is only one instance per JVM, once getInstance(isJob, jobId, taskGroupId) has created it, this method simply returns that instance for later callers.
      */
     public static synchronized PerfTrace getInstance()
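
The comment above describes a per-JVM singleton with two getInstance overloads: the parameterized one creates the instance, the no-argument one returns it afterwards. A standalone sketch of that pattern; field names are illustrative, not copied from PerfTrace.

    // Standalone sketch of the singleton access pattern described above.
    public class PerfTraceSketch
    {
        private static PerfTraceSketch instance;

        private final boolean isJob;
        private final long jobId;
        private final int taskGroupId;

        private PerfTraceSketch(boolean isJob, long jobId, int taskGroupId)
        {
            this.isJob = isJob;
            this.jobId = jobId;
            this.taskGroupId = taskGroupId;
        }

        public static synchronized PerfTraceSketch getInstance(boolean isJob, long jobId, int taskGroupId)
        {
            if (instance == null) {
                instance = new PerfTraceSketch(isJob, jobId, taskGroupId);
            }
            return instance;
        }

        public static synchronized PerfTraceSketch getInstance()
        {
            // assumes the parameterized overload has already been called once
            return instance;
        }

        @Override
        public String toString()
        {
            return "PerfTraceSketch{isJob=" + isJob + ", jobId=" + jobId + ", taskGroupId=" + taskGroupId + "}";
        }
    }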