Skip to content

Commit

Permalink
Rev467, requirements.txt accept newer dependencies, Boost dbschema.jso…
Browse files Browse the repository at this point in the history
…n, Move getDirname getFilename to helper, Verify optional files, Includes not allowed in user files, Optional files rules, Peer hashfield functions, Test optional files signing, Test file info, Test verify file, Test helpers
  • Loading branch information
shortcutme committed Sep 30, 2015
1 parent a7d8d48 commit 9d7d4f1
Show file tree
Hide file tree
Showing 22 changed files with 484 additions and 218 deletions.
4 changes: 2 additions & 2 deletions requirements.txt
Original file line number Diff line number Diff line change
@@ -1,2 +1,2 @@
gevent==1.0.1
msgpack-python==0.4.4
gevent>=1.0.1
msgpack-python>=0.4.4
2 changes: 1 addition & 1 deletion src/Config.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ class Config(object):

def __init__(self, argv):
self.version = "0.3.2"
self.rev = 465
self.rev = 467
self.argv = argv
self.action = None
self.createParser()
Expand Down
77 changes: 47 additions & 30 deletions src/Content/ContentManager.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
from Debug import Debug
from Crypt import CryptHash
from Config import config

from util import helper

class ContentManager(object):

Expand All @@ -26,8 +26,8 @@ def loadContent(self, content_inner_path="content.json", add_bad_files=True, del
content_inner_path = content_inner_path.strip("/") # Remove / from begning
old_content = self.contents.get(content_inner_path)
content_path = self.site.storage.getPath(content_inner_path)
content_dir = self.toDir(self.site.storage.getPath(content_inner_path))
content_inner_dir = self.toDir(content_inner_path)
content_dir = helper.getDirname(self.site.storage.getPath(content_inner_path))
content_inner_dir = helper.getDirname(content_inner_path)

if os.path.isfile(content_path):
try:
Expand Down Expand Up @@ -140,16 +140,29 @@ def getFileInfo(self, inner_path):
while True:
content_inner_path = "%s/content.json" % "/".join(dirs)
content = self.contents.get(content_inner_path.strip("/"))
if content and "files" in content: # Check if content.json exists

# Check in files
if content and "files" in content:
back = content["files"].get("/".join(inner_path_parts))
if back:
back["content_inner_path"] = content_inner_path
back["optional"] = False
return back

# Check in optional files
if content and "files_optional" in content: # Check if file in this content.json
back = content["files_optional"].get("/".join(inner_path_parts))
if back:
back["content_inner_path"] = content_inner_path
back["optional"] = True
return back

if content and "user_contents" in content: # User dir
# Return the rules if user dir
if content and "user_contents" in content:
back = content["user_contents"]
# Content.json is in the users dir
back["content_inner_path"] = re.sub("(.*)/.*?$", "\\1/content.json", inner_path)
back["optional"] = None
return back

# No inner path in this dir, lets try the parent dir
Expand Down Expand Up @@ -234,6 +247,7 @@ def getUserContentRules(self, parent_content, inner_path, content):
rules["signers"] = []
rules["signers"].append(user_address) # Add user as valid signer
rules["user_address"] = user_address
rules["includes_allowed"] = False

return rules

Expand All @@ -243,7 +257,7 @@ def hashFiles(self, dir_inner_path, ignore_pattern=None, optional_pattern=None):
files_optional_node = {}

for file_relative_path in self.site.storage.list(dir_inner_path):
file_name = self.toFilename(file_relative_path)
file_name = helper.getFilename(file_relative_path)

ignored = optional = False
if file_name == "content.json":
Expand Down Expand Up @@ -283,12 +297,12 @@ def sign(self, inner_path="content.json", privatekey=None, filewrite=True, updat
if extend:
content.update(extend) # Add custom fields

directory = self.toDir(self.site.storage.getPath(inner_path))
inner_directory = self.toDir(inner_path)
directory = helper.getDirname(self.site.storage.getPath(inner_path))
inner_directory = helper.getDirname(inner_path)
self.log.info("Opening site data directory: %s..." % directory)

changed_files = [inner_path]
files_node, files_optional_node = self.hashFiles(self.toDir(inner_path), content.get("ignore"), content.get("optional"))
files_node, files_optional_node = self.hashFiles(helper.getDirname(inner_path), content.get("ignore"), content.get("optional"))

# Find changed files
files_merged = files_node.copy()
Expand All @@ -310,13 +324,17 @@ def sign(self, inner_path="content.json", privatekey=None, filewrite=True, updat
new_content = content.copy() # Create a copy of current content.json
new_content["files"] = files_node # Add files sha512 hash
if files_optional_node:
new_content["files_optional_node"] = files_optional_node
new_content["files_optional"] = files_optional_node
elif "files_optional" in new_content:
del new_content["files_optional"]

new_content["modified"] = time.time() # Add timestamp
if inner_path == "content.json":
new_content["address"] = self.site.address
new_content["zeronet_version"] = config.version
new_content["signs_required"] = content.get("signs_required", 1)

# Verify private key
from Crypt import CryptBitcoin
self.log.info("Verifying private key...")
privatekey_address = CryptBitcoin.privatekeyToAddress(privatekey)
Expand Down Expand Up @@ -409,6 +427,7 @@ def verifyCert(self, inner_path, content):
# Return: True or False
def verifyContent(self, inner_path, content):
content_size = len(json.dumps(content)) + sum([file["size"] for file in content["files"].values()]) # Size of new content
content_size_optional = sum([file["size"] for file in content.get("files_optional", {}).values()])
site_size = self.getTotalSize(ignore=inner_path) + content_size # Site size without old content
if site_size > self.site.settings.get("size", 0):
self.site.settings["size"] = site_size # Save to settings if larger
Expand All @@ -433,23 +452,34 @@ def verifyContent(self, inner_path, content):
return False

# Check include size limit
if rules.get("max_size"): # Include size limit
if rules.get("max_size") is not None: # Include size limit
if content_size > rules["max_size"]:
self.log.error("%s: Include too large %s > %s" % (inner_path, content_size, rules["max_size"]))
return False

# Check if content includes allowed
if rules.get("includes_allowed") is False and content.get("includes"):
self.log.error("%s: Includes not allowed" % inner_path)
return False # Includes not allowed
if rules.get("max_size_optional") is not None: # Include optional files limit
if content_size_optional > rules["max_size_optional"]:
self.log.error("%s: Include optional files too large %s > %s" % (inner_path, content_size_optional, rules["max_size_optional"]))
return False

# Filename limit
if rules.get("files_allowed"):
for file_inner_path in content["files"].keys():
if not re.match("^%s$" % rules["files_allowed"], file_inner_path):
self.log.error("%s: File not allowed" % file_inner_path)
self.log.error("%s %s: File not allowed" % (inner_path, file_inner_path))
return False

if rules.get("files_allowed_optional"):
for file_inner_path in content.get("files_optional", {}).keys():
if not re.match("^%s$" % rules["files_allowed_optional"], file_inner_path):
self.log.error("%s %s: Optional file not allowed" % (inner_path, file_inner_path))
return False

# Check if content includes allowed
if rules.get("includes_allowed") is False and content.get("includes"):
self.log.error("%s: Includes not allowed" % inner_path)
return False # Includes not allowed

return True # All good

# Verify file validity
Expand Down Expand Up @@ -507,7 +537,7 @@ def verifyFile(self, inner_path, file, ignore_same=True):
valid_signs += CryptBitcoin.verify(sign_content, address, signs[address])
if valid_signs >= signs_required:
break # Break if we has enough signs

self.log.debug("%s: Valid signs: %s/%s" % (inner_path, valid_signs, signs_required))
return valid_signs >= signs_required
else: # Old style signing
return CryptBitcoin.verify(sign_content, self.site.address, sign)
Expand Down Expand Up @@ -537,19 +567,6 @@ def verifyFile(self, inner_path, file, ignore_same=True):
self.log.error("File not in content.json: %s" % inner_path)
return False

# Get dir from file
# Return: data/site/content.json -> data/site
def toDir(self, inner_path):
file_dir = re.sub("[^/]*?$", "", inner_path).strip("/")
if file_dir:
file_dir += "/" # Add / at end if its not the root
return file_dir

# Get dir from file
# Return: data/site/content.json -> data/site
def toFilename(self, inner_path):
return re.sub("^.*/", "", inner_path)


if __name__ == "__main__":
def testSign():
Expand Down
38 changes: 35 additions & 3 deletions src/Peer/Peer.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
import logging
import gevent
import time
import array

import gevent

from cStringIO import StringIO
from Debug import Debug
Expand All @@ -14,8 +16,8 @@
# Communicate remote peers
class Peer(object):
__slots__ = (
"ip", "port", "site", "key", "connection", "last_found", "last_response",
"last_ping", "added", "connection_error", "hash_failed", "download_bytes", "download_time"
"ip", "port", "site", "key", "connection", "last_found", "last_response", "last_ping", "last_hashfield",
"hashfield", "added", "connection_error", "hash_failed", "download_bytes", "download_time"
)

def __init__(self, ip, port, site=None):
Expand All @@ -25,6 +27,8 @@ def __init__(self, ip, port, site=None):
self.key = "%s:%s" % (ip, port)

self.connection = None
self.hashfield = array.array("H") # Got optional files hash_id
self.last_hashfield = None # Last time hashfiled downloaded
self.last_found = time.time() # Time of last found in the torrent tracker
self.last_response = None # Time of last successful response from peer
self.last_ping = None # Last response time for ping
Expand Down Expand Up @@ -230,6 +234,34 @@ def remove(self):
if self.connection:
self.connection.close()

# - HASHFIELD -

def updateHashfield(self, force=False):
# Don't update hashfield again in 15 min
if self.last_hashfield and time.time() - self.last_hashfield > 60 * 15 and not force:
return False

response = self.request("getHashfield", {"site": self.site.address})
if not response or "error" in response:
return False
self.last_hashfield = time.time()
self.hashfield = response["hashfield"]

return self.hashfield

def setHashfield(self, hashfield_dump):
self.hashfield.fromstring(hashfield_dump)

def hasHash(self, hash_id):
return hash_id in self.hashfield

# Return: ["ip:port", "ip:port",...]
def findHash(self, hash_id):
response = self.request("findHash", {"site": self.site.address, "hash_id": hash_id})
if not response or "error" in response:
return False
return [helper.unpackAddress(peer) for peer in response["peers"]]

# - EVENTS -

# On connection error
Expand Down
4 changes: 2 additions & 2 deletions src/Site/Site.py
Original file line number Diff line number Diff line change
Expand Up @@ -112,7 +112,7 @@ def downloadContent(self, inner_path, download_files=True, peer=None, check_modi
s = time.time()
self.log.debug("Downloading %s..." % inner_path)
found = self.needFile(inner_path, update=self.bad_files.get(inner_path))
content_inner_dir = self.content_manager.toDir(inner_path)
content_inner_dir = helper.getDirname(inner_path)
if not found:
self.log.debug("Download %s failed, check_modifications: %s" % (inner_path, check_modifications))
if check_modifications: # Download failed, but check modifications if its succed later
Expand Down Expand Up @@ -386,7 +386,7 @@ def clone(self, address, privatekey=None, address_index=None, overwrite=False):
# Copy files
for content_inner_path, content in self.content_manager.contents.items():
for file_relative_path in sorted(content["files"].keys()):
file_inner_path = self.content_manager.toDir(content_inner_path) + file_relative_path # Relative to content.json
file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to content.json
file_inner_path = file_inner_path.strip("/") # Strip leading /
if file_inner_path.split("/")[0] in default_dirs: # Dont copy directories that has -default postfixed alternative
self.log.debug("[SKIP] %s (has default alternative)" % file_inner_path)
Expand Down
16 changes: 10 additions & 6 deletions src/Site/SiteStorage.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,14 +3,14 @@
import shutil
import json
import time
import sys

import sqlite3
import gevent.event

from Db import Db
from Debug import Debug
from Config import config
from util import helper


class SiteStorage:
Expand Down Expand Up @@ -98,7 +98,7 @@ def rebuildDb(self, delete_db=True):
for file_relative_path in content["files"].keys():
if not file_relative_path.endswith(".json"):
continue # We only interesed in json files
content_inner_path_dir = self.site.content_manager.toDir(content_inner_path) # Content.json dir relative to site
content_inner_path_dir = helper.getDirname(content_inner_path) # Content.json dir relative to site
file_inner_path = content_inner_path_dir + file_relative_path # File Relative to site dir
file_inner_path = file_inner_path.strip("/") # Strip leading /
file_path = self.getPath(file_inner_path)
Expand Down Expand Up @@ -170,7 +170,6 @@ def list(self, dir_inner_path):
else:
yield file_name


# Site content updated
def onUpdated(self, inner_path):
file_path = self.getPath(inner_path)
Expand Down Expand Up @@ -255,7 +254,7 @@ def verifyFiles(self, quick_check=False): # Fast = using file size
self.log.debug("[MISSING] %s" % content_inner_path)
bad_files.append(content_inner_path)
for file_relative_path in content["files"].keys():
file_inner_path = self.site.content_manager.toDir(content_inner_path) + file_relative_path # Relative to site dir
file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir
file_inner_path = file_inner_path.strip("/") # Strip leading /
file_path = self.getPath(file_inner_path)
if not os.path.isfile(file_path):
Expand Down Expand Up @@ -304,8 +303,13 @@ def deleteFiles(self):
files = [] # Get filenames
for content_inner_path, content in self.site.content_manager.contents.items():
files.append(content_inner_path)
for file_relative_path in content["files"].keys():
file_inner_path = self.site.content_manager.toDir(content_inner_path) + file_relative_path # Relative to site dir
# Add normal files
for file_relative_path in content.get("files", {}).keys():
file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir
files.append(file_inner_path)
# Add optional files
for file_relative_path in content.get("files_optional", {}).keys():
file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir
files.append(file_inner_path)

for inner_path in files:
Expand Down
17 changes: 17 additions & 0 deletions src/Test/Spy.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
class Spy:
def __init__(self, obj, func_name):
self.obj = obj
self.func_name = func_name
self.func_original = getattr(self.obj, func_name)
self.calls = []

def __enter__(self, *args, **kwargs):
def loggedFunc(cls, *args, **kwags):
print "Logging", self, args, kwargs
self.calls.append(args)
return self.func_original(cls, *args, **kwargs)
setattr(self.obj, self.func_name, loggedFunc)
return self.calls

def __exit__(self, *args, **kwargs):
setattr(self.obj, self.func_name, self.func_original)
Loading

0 comments on commit 9d7d4f1

Please sign in to comment.