Merge pull request Peergos#542 from Peergos/update/ipfs-v0.4.22
Update/ipfs v0.4.22
ianopolous authored Aug 16, 2019
2 parents bcfe289 + bb954e4 commit 05811d1
Showing 3 changed files with 308 additions and 73 deletions.
19 changes: 18 additions & 1 deletion src/peergos/server/Main.java
@@ -49,9 +49,26 @@ public class Main {
throw new IllegalStateException("Specified install directory "+ dir +" doesn't exist and can't be created");

IpfsInstaller.ensureInstalled(ipfsExePath);

List<IpfsInstaller.Plugin> plugins = IpfsInstaller.Plugin.parseAll(args);
Path ipfsDir = IpfsWrapper.getIpfsDir(args);
if (! plugins.isEmpty())
if (! ipfsDir.toFile().exists() && ! ipfsDir.toFile().mkdirs())
throw new IllegalStateException("Couldn't create ipfs dir: " + ipfsDir);

for (IpfsInstaller.Plugin plugin : plugins) {
plugin.ensureInstalled(ipfsDir);
}
},
Arrays.asList(
new Command.Arg("ipfs-exe-path", "Desired path to IPFS executable. Defaults to $PEERGOS_PATH/ipfs", false)
new Command.Arg("ipfs-exe-path", "Desired path to IPFS executable. Defaults to $PEERGOS_PATH/ipfs", false),
new Command.Arg("ipfs-plugins", "comma separated list of ipfs plugins to install, currently only go-ds-s3 is supported", false),
new Command.Arg("s3.path", "Path of data store in S3", false, "blocks"),
new Command.Arg("s3.bucket", "S3 bucket name", false),
new Command.Arg("s3.region", "S3 region", false, "us-east-1"),
new Command.Arg("s3.accessKey", "S3 access key", false, ""),
new Command.Arg("s3.secretKey", "S3 secret key", false, ""),
new Command.Arg("s3.region.endpoint", "Base url for S3 service", false)
)
);
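Taken together, the new arguments let the install command fetch the go-ds-s3 datastore plugin alongside the IPFS binary. Below is a minimal sketch of how they flow through the code above; the `Args.parse(String[])` factory, the flag spelling and the bucket value are assumptions for illustration, not part of this diff.

```java
import java.nio.file.Path;
import java.util.List;

import peergos.server.storage.IpfsInstaller;
import peergos.server.storage.IpfsWrapper;
import peergos.server.util.Args;

// Illustrative sketch only, not part of the commit.
public class PluginInstallSketch {
    public static void main(String[] ignored) {
        // Args.parse(String[]) is an assumption; the flag names come from the Command.Arg list above.
        Args args = Args.parse(new String[]{
                "-ipfs-plugins", "go-ds-s3",        // comma-separated; only go-ds-s3 is supported
                "-s3.bucket", "my-peergos-blocks",  // placeholder bucket name
                "-s3.region", "us-east-1"
        });
        List<IpfsInstaller.Plugin> plugins = IpfsInstaller.Plugin.parseAll(args);
        Path ipfsDir = IpfsWrapper.getIpfsDir(args);
        if (! plugins.isEmpty())
            ipfsDir.toFile().mkdirs();              // same guard as in Main above
        for (IpfsInstaller.Plugin plugin : plugins)
            plugin.ensureInstalled(ipfsDir);        // downloads go-ds-s3.so into <ipfsDir>/plugins
    }
}
```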
public static Command IPFS = new Command("ipfs",
236 changes: 204 additions & 32 deletions src/peergos/server/storage/IpfsInstaller.java
@@ -2,7 +2,9 @@

import static peergos.server.util.Logging.LOG;

import peergos.server.util.*;
import peergos.shared.crypto.hash.*;
import peergos.shared.io.ipfs.api.*;
import peergos.shared.io.ipfs.cid.*;
import peergos.shared.io.ipfs.multihash.*;
import peergos.shared.util.*;
@@ -14,40 +16,170 @@
import java.util.stream.*;

/**
* A Utility for installing IPFS.
* A Utility for installing IPFS and associated plugins.
*/
public class IpfsInstaller {

public enum DownloadTarget {
DARWIN_386("https://github.com/peergos/ipfs-releases/blob/master/v0.4.21/darwin-386/ipfs?raw=true",
Cid.decode("QmeQhmz7x3zQoKk8YxWctkKJ8eEWFEcBruo4GnvPZfcDZo")),
DARWIN_AMD64("https://github.com/peergos/ipfs-releases/blob/master/v0.4.21/darwin-amd64/ipfs?raw=true",
Cid.decode("Qmb7VnuQWsc9Nk1ZPZu9KfGu6kihVQZzKBqzsVENZQqt8B")),
FREEBSD_386("https://github.com/peergos/ipfs-releases/blob/master/v0.4.21/freebsd-386/ipfs?raw=true",
Cid.decode("QmSi7mCqhzuyo29kXPCsmyxhACAPwyRpzBwXbpZX4xF5uZ")),
FREEBSD_AMD64("https://github.com/peergos/ipfs-releases/blob/master/v0.4.21/freebsd-amd64/ipfs?raw=true",
Cid.decode("QmUN74yCNDnQKycxanzzYN3gwUqtpjvUkG41ZsLMurS4Kk")),
FREEBSD_ARM("https://github.com/peergos/ipfs-releases/blob/master/v0.4.21/freebsd-arm/ipfs?raw=true",
Cid.decode("QmTRLF6E8dgHB72EuNFG6UytM2rJzpJFZpT3RdCCasRVLJ")),
LINUX_386("https://github.com/peergos/ipfs-releases/blob/master/v0.4.21/linux-386/ipfs?raw=true",
Cid.decode("QmPBV4ghNPBjwNG8EHdmadntDR4mZCqwWN3uAEXAjNzij3")),
LINUX_AMD64("https://github.com/peergos/ipfs-releases/blob/master/v0.4.21/linux-amd64/ipfs?raw=true",
Cid.decode("QmY3ZFhh1hqSDcvS5us5JVYWHTrdRU4n3opFxWwx65tWBY")),
LINUX_ARM("https://github.com/peergos/ipfs-releases/blob/master/v0.4.21/linux-arm/ipfs?raw=true",
Cid.decode("QmcrXGWcDSBkHbkRfJRVp9NRi7G63BMkZ2uk7pyx6o2qWq")),
LINUX_ARM64("https://github.com/peergos/ipfs-releases/blob/master/v0.4.21/linux-arm64/ipfs?raw=true",
Cid.decode("QmXsxwWBUZfsMGMSzUep1zpMbCfzsYGFLkEegGuNL5X192")),
WINDOWS_386("https://github.com/peergos/ipfs-releases/blob/master/v0.4.21/windows-386/ipfs.exe?raw=true",
Cid.decode("Qmd62AkrhPzKqg8at8nNMzvTCtNc24eHveQ3m71zrXsVDc")),
WINDOWS_AMD64("https://github.com/peergos/ipfs-releases/blob/master/v0.4.21/windows-amd64/ipfs.exe?raw=true",
Cid.decode("QmW6QP2B9JSjikahjZyzq8DEhrLbCB6ZAqmHLP7JvDckcL")),;
S3_LINUX_AMD64("https://github.com/peergos/ipfs-releases/blob/master/v0.4.22/linux-amd64/plugins/go-ds-s3.so?raw=true",
Cid.decode("QmWAXFcFRZq4tyqVhFfH6yn6gBUGfmhRX19eUwYVE6PwSb")),
DARWIN_386("https://github.com/peergos/ipfs-releases/blob/master/v0.4.22/darwin-386/ipfs?raw=true",
Cid.decode("QmPNQqAExYqBAS1ruNakdirw36won2ELQJNbijZ4vXhd81")),
DARWIN_AMD64("https://github.com/peergos/ipfs-releases/blob/master/v0.4.22/darwin-amd64/ipfs?raw=true",
Cid.decode("QmQpC2vfdwyfhBpbYr76HsfFPMNmQqVroNsrqMrEb7BDUH")),
FREEBSD_386("https://github.com/peergos/ipfs-releases/blob/master/v0.4.22/freebsd-386/ipfs?raw=true",
Cid.decode("QmT7c37jtNN2PUCBT3rZsR7A7ofQzV1a3N3SUses1duLcw")),
FREEBSD_AMD64("https://github.com/peergos/ipfs-releases/blob/master/v0.4.22/freebsd-amd64/ipfs?raw=true",
Cid.decode("QmX7VgAc8kkYh3cdbe8zeGFC5V66pJeoyjjA7e25txdTBY")),
FREEBSD_ARM("https://github.com/peergos/ipfs-releases/blob/master/v0.4.22/freebsd-arm/ipfs?raw=true",
Cid.decode("QmZurjJxFJRpi85hTW4ymMXwnoMe5fDsBNAWnN6iwn2UGH")),
LINUX_386("https://github.com/peergos/ipfs-releases/blob/master/v0.4.22/linux-386/ipfs?raw=true",
Cid.decode("QmRviMmsVPVG3mLyefYXDQwyNh4AN1Yn6yG9YJ4wUv8jMA")),
LINUX_AMD64("https://github.com/peergos/ipfs-releases/blob/master/v0.4.22/linux-amd64/ipfs?raw=true",
Cid.decode("QmNc6hEaB3PZoiDvZp5hdt4FAkFomJEJHi3zi96VH51ybf"), Arrays.asList(S3_LINUX_AMD64)),
LINUX_ARM("https://github.com/peergos/ipfs-releases/blob/master/v0.4.22/linux-arm/ipfs?raw=true",
Cid.decode("QmeJ5L16uzPAaLX2K9BeU3yB9eVoeqzCMk11eiBoBHvtN1")),
LINUX_ARM64("https://github.com/peergos/ipfs-releases/blob/master/v0.4.22/linux-arm64/ipfs?raw=true",
Cid.decode("QmUPRX5aWM9FJpc1pE7fFMjhR3wgfCJpjTAcQJvitoCPpQ")),
WINDOWS_386("https://github.com/peergos/ipfs-releases/blob/master/v0.4.22/windows-386/ipfs.exe?raw=true",
Cid.decode("QmR5v39qAgKw4DFvg8xTYSek7kaQDTeLPB46QCZSN2sMw9")),
WINDOWS_AMD64("https://github.com/peergos/ipfs-releases/blob/master/v0.4.22/windows-amd64/ipfs.exe?raw=true",
Cid.decode("QmUKzACmhakxdNp2RQMtZGZQ9skiXX1yFn6A3MLYc3auqq")),;

public final String url;
public final Multihash multihash;
public final List<DownloadTarget> plugins;

DownloadTarget(String url, Multihash multihash, List<DownloadTarget> plugins) {
this.url = url;
this.multihash = multihash;
this.plugins = Collections.unmodifiableList(plugins);
}

DownloadTarget(String url, Multihash multihash) {
this.url = url;
this.multihash = multihash;
this.plugins = Collections.emptyList();
}
}
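The new `plugins` field is what ties a platform's IPFS binary to the plugin binaries built for it. A small illustrative example of that resolution follows; the hard-coded "LINUX_AMD64" stands in for what getOsArch() computes further down.

```java
import peergos.server.storage.IpfsInstaller.DownloadTarget;

// Illustration only: resolve a platform string to its DownloadTarget and inspect its plugins.
public class TargetSketch {
    public static void main(String[] args) {
        DownloadTarget target = DownloadTarget.valueOf("LINUX_AMD64");
        System.out.println(target.url);      // the v0.4.22 linux-amd64 ipfs download URL
        System.out.println(target.plugins);  // [S3_LINUX_AMD64] - currently the only target with a plugin
    }
}
```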

public interface Plugin {

void ensureInstalled(Path ipfsDir);

void configure(IpfsWrapper ipfs);

final class S3 implements Plugin {
public static final String TYPE = "S3";
public final String path, bucket, region, accessKey, secretKey, regionEndpoint;
public final DownloadTarget version;

public S3(String path, String bucket, String region, String accessKey, String secretKey,
String regionEndpoint, DownloadTarget version) {
this.path = path;
this.bucket = bucket;
this.region = region;
this.accessKey = accessKey;
this.secretKey = secretKey;
this.regionEndpoint = regionEndpoint;
this.version = version;
}

public String getFileName() {
return "go-ds-s3.so";
}

public Object toJson(Multihash nodeId) {
Map<String, Object> res = new TreeMap<>();
Map<String, Object> child = new TreeMap<>();
// Make sure that multiple IPFS instances can use the same S3 bucket by prefixing the path with their nodeID
String s3PathPrefix = nodeId.toString() + "/" + path;
child.put("path", s3PathPrefix);
child.put("bucket", bucket);
child.put("accessKey", accessKey);
child.put("secretKey", secretKey);
child.put("region", region);
child.put("regionEndpoint", regionEndpoint);
child.put("type", "s3ds");
res.put("child", child);
res.put("mountpoint", "/blocks");
res.put("prefix", "s3.datastore");
res.put("type", "measure");
return res;
}

public static S3 build(Args a) {
String path = a.getArg("s3.path", "blocks");
String bucket = a.getArg("s3.bucket");
String region = a.getArg("s3.region");
String accessKey = a.getArg("s3.accessKey", "");
String secretKey = a.getArg("s3.secretKey", "");
String regionEndpoint = a.getArg("s3.region.endpoint", bucket + ".amazonaws.com");
String osArch = getOsArch();
DownloadTarget pluginVersion = DownloadTarget.valueOf(TYPE + "_" + osArch.toUpperCase());
return new S3(path, bucket, region, accessKey, secretKey, regionEndpoint, pluginVersion);
}

@Override
public void configure(IpfsWrapper ipfs) {
// Do the configuration dance..
System.out.println("Configuring S3 datastore IPFS plugin");
Multihash nodeId = ipfs.nodeId();

// update the config file
List<Object> mount = Arrays.asList(
toJson(nodeId),
JSONParser.parse("{\n" +
" \"child\": {\n" +
" \"compression\": \"none\",\n" +
" \"path\": \"datastore\",\n" +
" \"type\": \"levelds\"\n" +
" },\n" +
" \"mountpoint\": \"/\",\n" +
" \"prefix\": \"leveldb.datastore\",\n" +
" \"type\": \"measure\"\n" +
" }")
);
String mounts = JSONParser.toString(mount);
ipfs.setConfig("Datastore.Spec.mounts", mounts);

// replace the datastore spec file
String newDataStoreSpec = "{\"mounts\":[{\"bucket\":\"" + bucket +
"\",\"mountpoint\":\"/blocks\",\"region\":\"" + region +
"\",\"rootDirectory\":\"\"},{\"mountpoint\":\"/\",\"path\":\"datastore\",\"type\":\"levelds\"}],\"type\":\"mount\"}";
Path specPath = ipfs.ipfsDir.resolve("datastore_spec");
try {
Files.write(specPath, newDataStoreSpec.getBytes());
} catch (IOException e) {
throw new RuntimeException("Couldn't overwrite ipfs datastore spec file", e);
}
}

@Override
public void ensureInstalled(Path ipfsDir) {
IpfsInstaller.ensurePluginInstalled(ipfsDir.resolve("plugins").resolve(getFileName()), version);
}
}

static List<Plugin> parseAll(Args args) {
List<String> plugins = Arrays.asList(args.getArg("ipfs-plugins", "").split(","))
.stream()
.filter(s -> !s.isEmpty())
.collect(Collectors.toList());

return plugins.stream()
.map(name -> parse(name, args))
.collect(Collectors.toList());
}

static Plugin parse(String pluginName, Args a) {
switch (pluginName) {
case "go-ds-s3": {
return S3.build(a);
}
default:
throw new IllegalStateException("Unknown plugin: " + pluginName);
}
}
}
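For reference, here is a sketch of the mount entry the S3 plugin serialises; the keys mirror toJson() and configure() above, while the bucket, region, credentials and node id are placeholders.

```java
import peergos.server.storage.IpfsInstaller;
import peergos.shared.io.ipfs.api.JSONParser;
import peergos.shared.io.ipfs.cid.Cid;

// Illustration only: serialise the S3 mount entry the same way configure() does.
public class S3MountSketch {
    public static void main(String[] args) {
        IpfsInstaller.Plugin.S3 s3 = new IpfsInstaller.Plugin.S3(
                "blocks", "my-bucket", "us-east-1", "ACCESS", "SECRET",
                "my-bucket.amazonaws.com", IpfsInstaller.DownloadTarget.S3_LINUX_AMD64);
        Object mount = s3.toJson(Cid.decode("QmNc6hEaB3PZoiDvZp5hdt4FAkFomJEJHi3zi96VH51ybf"));
        // Prints roughly (key order is alphabetical because toJson uses a TreeMap):
        // {"child":{"accessKey":"ACCESS","bucket":"my-bucket","path":"QmNc6.../blocks",
        //           "region":"us-east-1","regionEndpoint":"my-bucket.amazonaws.com",
        //           "secretKey":"SECRET","type":"s3ds"},
        //  "mountpoint":"/blocks","prefix":"s3.datastore","type":"measure"}
        System.out.println(JSONParser.toString(mount));
    }
}
```

The datastore_spec file written by configure() would then carry the same bucket and region under the /blocks mountpoint, alongside the default levelds mount at /.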

@@ -65,11 +197,15 @@ public static Path getExecutableForOS(Path targetFile) {
}

private static DownloadTarget getForPlatform() {
String type = getOsArch();
return DownloadTarget.valueOf(type.toUpperCase());
}

private static String getOsArch() {
String os = canonicaliseOS(System.getProperty("os.name").toLowerCase());
String arch = canonicaliseArchitecture(System.getProperty("os.arch"));

String type = os + "_" + arch;
return DownloadTarget.valueOf(type.toUpperCase());
return os + "_" + arch;
}

private static String canonicaliseArchitecture(String arch) {
@@ -94,6 +230,29 @@ private static String canonicaliseOS(String os) {
return os;
}

private static void ensurePluginInstalled(Path targetFile, DownloadTarget downloadTarget) {
if (Files.exists(targetFile)) {
//check contents are correct
try {
byte[] raw = Files.readAllBytes(targetFile);
Multihash computed = new Multihash(Multihash.Type.sha2_256, Hash.sha256(raw));
if (computed.equals(downloadTarget.multihash)) {
//all present and correct
return;
}
targetFile.toFile().delete();
install(targetFile, downloadTarget, Optional.empty());
return;
} catch (IOException ioe) {
throw new IllegalStateException(ioe.getMessage(), ioe);
}
}
else {
LOG().info("Binary "+ targetFile + " not available");
}
install(targetFile, downloadTarget, Optional.empty());
}

private static void ensureInstalled(Path targetFile, DownloadTarget downloadTarget) {
if (Files.exists(targetFile)) {
//check contents are correct
@@ -126,8 +285,13 @@ private static void ensureInstalled(Path targetFile, DownloadTarget downloadTarg
private static void install(Path targetFile, DownloadTarget downloadTarget, Optional<Version> previousIpfsVersion) {
try {
Path cacheFile = getLocalCacheDir().resolve(downloadTarget.multihash.toString());
Path fileName = targetFile.getFileName();
File targetParent = targetFile.getParent().toFile();
if (! targetParent.exists())
if (! targetParent.mkdirs())
throw new IllegalStateException("Couldn't create parent directory: " + targetFile.getParent());
if (cacheFile.toFile().exists()) {
LOG().info("Using cached IPFS "+ cacheFile);
LOG().info("Using cached " + fileName + " " + cacheFile);
byte[] raw = Files.readAllBytes(cacheFile);
Multihash computed = new Multihash(Multihash.Type.sha2_256, Hash.sha256(raw));
if (computed.equals(downloadTarget.multihash)) {
@@ -143,24 +307,24 @@ private static void install(Path targetFile, DownloadTarget downloadTarget, Opti
}

URI uri = new URI(downloadTarget.url);
LOG().info("Downloading IPFS binary "+ downloadTarget.url +"...");
LOG().info("Downloading " + fileName + " binary "+ downloadTarget.url +"...");
byte[] raw = Serialize.readFully(uri.toURL().openStream());
Multihash computed = new Multihash(Multihash.Type.sha2_256, Hash.sha256(raw));

if (! computed.equals(downloadTarget.multihash))
throw new IllegalStateException("Incorrect hash for ipfs binary, aborting install!");
throw new IllegalStateException("Incorrect hash for binary, aborting install!");

// save to local cache
cacheFile.getParent().toFile().mkdirs();
atomicallySaveToFile(cacheFile, raw);

LOG().info("Writing ipfs-binary to "+ targetFile);
LOG().info("Writing " + fileName + " binary to "+ targetFile);
try {
atomicallySaveToFile(targetFile, raw);
} catch (FileAlreadyExistsException e) {
boolean delete = targetFile.toFile().delete();
if (! delete)
throw new IllegalStateException("Couldn't delete old version of ipfs!");
throw new IllegalStateException("Couldn't delete old version of " + fileName + "!");
atomicallySaveToFile(targetFile, raw);
}

@@ -187,13 +351,21 @@ private static Path getLocalCacheDir() {
}

public static void main(String[] args) throws Exception {
codegen(Paths.get("/home/ian/ipfs-releases/v0.4.21"));
String version = "v0.4.22";
byte[] bytes = Files.readAllBytes(Paths.get("/home", "ian", "ipfs-releases", version,
"linux-amd64", "plugins", "go-ds-s3.so"));
Multihash hash = new Multihash(Multihash.Type.sha2_256, Hash.sha256(bytes));
System.out.println("S3_LINUX_AMD64(\"https://github.com/peergos/ipfs-releases/blob/master/" + version +
"/linux-amd64/plugins/go-ds-s3.so?raw=true\", Cid.decode(\"" + hash + "\")),");
codegen(Paths.get("/home/ian/ipfs-releases/" + version));
}

private static void codegen(Path root) throws Exception {
String urlBase = "https://github.com/peergos/ipfs-releases/blob/master/" + root.getFileName() + "/";
for (File arch: Arrays.asList(root.toFile().listFiles()).stream().sorted().collect(Collectors.toList())) {
for (File binary: arch.listFiles()) {
if (binary.isDirectory())
continue;
byte[] bytes = Files.readAllBytes(binary.toPath());
Multihash hash = new Multihash(Multihash.Type.sha2_256, Hash.sha256(bytes));
System.out.println(arch.getName().toUpperCase().replaceAll("-", "_")
@@ -205,7 +377,7 @@ private static void codegen(Path root) throws Exception {

private static class ReleasePreparation {
public static void main(String[] a) throws Exception {
String version = "v0.4.21";
String version = "v0.4.22";
Path baseDir = Files.createTempDirectory("ipfs");
for (String os: Arrays.asList("linux", "windows", "darwin", "freebsd")) {
for (String arch: Arrays.asList("386", "amd64", "arm", "arm64")) {