
fix configuration conversion bugs and docs and hdfs configs
fjy committed Oct 29, 2013
1 parent fbb2e8d commit 21235ae
Showing 5 changed files with 30 additions and 14 deletions.
8 changes: 4 additions & 4 deletions docs/content/Indexing-Service.md
@@ -233,10 +233,10 @@ Additional peon configs include:
|Property|Description|Default|
|--------|-----------|-------|
|`druid.peon.mode`|Choices are "local" and "remote". Setting this to local means you intend to run the peon as a standalone node (Not recommended).|remote|
-|`druid.indexer.baseDir`|Base temporary working directory.|/tmp|
-|`druid.indexer.baseTaskDir`|Base temporary working directory for tasks.|/tmp/persistent/tasks|
-|`druid.indexer.hadoopWorkingPath`|Temporary working directory for Hadoop tasks.|/tmp/druid-indexing|
-|`druid.indexer.defaultRowFlushBoundary`|Highest row count before persisting to disk. Used for indexing generating tasks.|50000|
+|`druid.indexer.task.baseDir`|Base temporary working directory.|/tmp|
+|`druid.indexer.task.baseTaskDir`|Base temporary working directory for tasks.|/tmp/persistent/tasks|
+|`druid.indexer.task.hadoopWorkingPath`|Temporary working directory for Hadoop tasks.|/tmp/druid-indexing|
+|`druid.indexer.task.defaultRowFlushBoundary`|Highest row count before persisting to disk. Used for indexing generating tasks.|50000|
|`druid.indexer.task.chathandler.type`|Choices are "noop" and "announce". Certain tasks will use service discovery to announce an HTTP endpoint that events can be posted to.|noop|
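For reference, a peon's runtime.properties written against the renamed keys would look like the sketch below; the values are simply the defaults from the table above, not taken from a real deployment.

```properties
# Hypothetical peon runtime.properties snippet using the new task.* names
druid.indexer.task.baseDir=/tmp
druid.indexer.task.baseTaskDir=/tmp/persistent/tasks
druid.indexer.task.hadoopWorkingPath=/tmp/druid-indexing
druid.indexer.task.defaultRowFlushBoundary=50000
```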

If the peon is running in remote mode, there must be an overlord up and running. Running peons in remote mode requires the following configurations:
2 changes: 1 addition & 1 deletion HdfsDataSegmentPusherConfig.java
@@ -23,7 +23,7 @@

/**
*/
-public abstract class HdfsDataSegmentPusherConfig
+public class HdfsDataSegmentPusherConfig
{
@JsonProperty
public String storageDirectory = "";
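The now-concrete pusher config above is populated from deep-storage properties. Below is a minimal sketch of such settings, assuming the `druid.storage` prefix that the `PrefixRename("druid.pusher", "druid.storage")` rule later in this commit points at; the `type` key and the path value are assumptions, not taken from this diff.

```properties
# Hypothetical HDFS deep-storage configuration; storageDirectory maps onto the
# @JsonProperty field shown above, and the path is an example value only
druid.storage.type=hdfs
druid.storage.storageDirectory=/druid/segments
```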
2 changes: 1 addition & 1 deletion services/src/main/java/io/druid/cli/CliCoordinator.java
@@ -85,7 +85,7 @@ public void configure(Binder binder)
{
ConfigProvider.bind(binder, DruidCoordinatorConfig.class);

-JsonConfigProvider.bind(binder, "druid.manager.segment", DatabaseSegmentManagerConfig.class);
+JsonConfigProvider.bind(binder, "druid.manager.segments", DatabaseSegmentManagerConfig.class);
JsonConfigProvider.bind(binder, "druid.manager.rules", DatabaseRuleManagerConfig.class);

binder.bind(RedirectServlet.class).in(LazySingleton.class);
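For context, `JsonConfigProvider.bind` maps flat properties under the given prefix onto the Jackson-annotated fields of the bound config class, so the rename above changes which keys the coordinator reads. A minimal sketch of one such property follows; the `pollDuration` field name mirrors the `druid.manager.segment.pollDuration` rename target elsewhere in this commit, and the value is an assumption.

```properties
# Hypothetical coordinator runtime.properties entry read through the
# "druid.manager.segments" binding above
druid.manager.segments.pollDuration=PT1M
```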
1 change: 0 additions & 1 deletion services/src/main/java/io/druid/cli/CliPeon.java
@@ -60,7 +60,6 @@
import io.druid.indexing.worker.executor.ChatHandlerResource;
import io.druid.indexing.worker.executor.ExecutorLifecycle;
import io.druid.indexing.worker.executor.ExecutorLifecycleConfig;
-import io.druid.initialization.DruidModule;
import io.druid.query.QuerySegmentWalker;
import io.druid.segment.loading.DataSegmentKiller;
import io.druid.segment.loading.OmniDataSegmentKiller;
31 changes: 24 additions & 7 deletions services/src/main/java/io/druid/cli/convert/ConvertProperties.java
@@ -22,18 +22,19 @@
import com.google.common.base.Charsets;
import com.google.common.base.Throwables;
import com.google.common.collect.Lists;
+import com.google.common.io.Closeables;
import com.metamx.common.logger.Logger;
import io.airlift.command.Command;
import io.airlift.command.Option;

+import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.Reader;
-import java.io.Writer;
import java.util.List;
import java.util.Map;
import java.util.Properties;
@@ -57,12 +58,16 @@ public class ConvertProperties implements Runnable
new Rename("druid.database.connectURI", "druid.db.connector.connectURI"),
new Rename("druid.database.user", "druid.db.connector.user"),
new Rename("druid.database.password", "druid.db.connector.password"),
new Rename("druid.database.poll.duration", "druid.manager.segment.pollDuration"),
new Rename("druid.database.password", "druid.db.connector.password"),
new Rename("com.metamx.emitter", "druid.emitter"),
new Rename("com.metamx.emitter.logging", "druid.emitter.logging"),
new Rename("com.metamx.emitter.logging.level", "druid.emitter.logging.logLevel"),
new Rename("com.metamx.emitter.http", "druid.emitter.http"),
new Rename("com.metamx.emitter.http.url", "druid.emitter.http.recipientBaseUrl"),
new Rename("com.metamx.emitter.period", "druid.emitter.emissionPeriod"),
new Rename("com.metamx.druid.emitter.period", "druid.emitter.emissionPeriod"),
new Rename("com.metamx.metrics.emitter.period", "druid.emitter.emissionPeriod"),
new PrefixRename("com.metamx.emitter", "druid.emitter"),
new PrefixRename("com.metamx.druid.emitter", "druid.emitter"),
new IndexCacheConverter(),
@@ -80,7 +85,6 @@ public class ConvertProperties implements Runnable
new Rename("druid.indexer.fork.java", "druid.indexer.runner.javaCommand"),
new Rename("druid.indexer.fork.opts", "druid.indexer.runner.javaOpts"),
new Rename("druid.indexer.fork.classpath", "druid.indexer.runner.classpath"),
new Rename("druid.indexer.fork.main", "druid.indexer.runner.mainClass"),
new Rename("druid.indexer.fork.hostpattern", "druid.indexer.runner.hostPattern"),
new Rename("druid.indexer.fork.startport", "druid.indexer.runner.startPort"),
new Rename("druid.indexer.properties.prefixes", "druid.indexer.runner.allowedPrefixes"),
@@ -110,6 +114,7 @@ public class ConvertProperties implements Runnable
new Rename("druid.master.merger.service", "druid.selectors.indexing.serviceName"),
new Rename("druid.master.period.segmentMerger", "druid.coordinator.period.indexingPeriod"),
new Rename("druid.master.merger.on", "druid.coordinator.merge.on"),
new Rename("druid.master.period", "druid.coordinator.period"),
new PrefixRename("druid.master", "druid.coordinator"),
new PrefixRename("druid.pusher", "druid.storage"),
new DataSegmentPusherDefaultConverter(),
@@ -139,8 +144,7 @@ public void run()

Properties fromFile = new Properties();

-try (Reader in = new InputStreamReader(new FileInputStream(file), Charsets.UTF_8))
-{
+try (Reader in = new InputStreamReader(new FileInputStream(file), Charsets.UTF_8)) {
fromFile.load(in);
}
catch (IOException e) {
@@ -157,6 +161,7 @@
for (Map.Entry<String, String> entry : converter.convert(fromFile).entrySet()) {
if (entry.getValue() != null) {
++count;
log.info("Converting [%s] to [%s]", property, entry.getKey());
updatedProps.setProperty(entry.getKey(), entry.getValue());
}
}
@@ -165,6 +170,7 @@
}

if (!handled) {
log.info("Not converting [%s]", property);
updatedProps.put(property, fromFile.getProperty(property));
}
}
@@ -173,13 +179,24 @@
"druid.monitoring.monitors", "[\"io.druid.server.metrics.ServerMonitor\", \"com.metamx.metrics.SysMonitor\"]"
);

-try (Writer out = new OutputStreamWriter(new FileOutputStream(outFile), Charsets.UTF_8))
-{
-updatedProps.store(out, null);
+BufferedWriter out = null;
+try {
+out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(outFile), Charsets.UTF_8));
+for (Map.Entry<Object, Object> prop : updatedProps.entrySet()) {
+out.write((String) prop.getKey());
+out.write("=");
+out.write((String) prop.getValue());
+out.newLine();
+}
}
catch (IOException e) {
throw Throwables.propagate(e);
}
+finally {
+if (out != null) {
+Closeables.closeQuietly(out);
+}
+}

log.info("Completed! Converted[%,d] properties.", count);
}
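Putting the rename rules above together, here is a sketch of what a before-and-after conversion might look like, using only mappings visible in this diff; the property values are placeholders.

```properties
# Hypothetical input (old property names)
druid.database.user=druid
druid.database.poll.duration=PT1M
com.metamx.emitter.period=PT1M
druid.master.period=PT60S

# Hypothetical converted output
druid.db.connector.user=druid
druid.manager.segment.pollDuration=PT1M
druid.emitter.emissionPeriod=PT1M
druid.coordinator.period=PT60S
```

Because the converted file is written with the hand-rolled key=value loop above rather than `Properties.store`, it carries no timestamp comment header and no escaping of special characters in keys or values.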
