update week 11
as-wanfang committed Apr 30, 2020
1 parent 17842ef commit 1704571
Showing 3 changed files with 145 additions and 30 deletions.
67 changes: 40 additions & 27 deletions Simulation/tasks/Claw_machine/toy.py
100644 → 100755
@@ -19,21 +19,23 @@ def scene(scene_file_name):
# return abs dir of scene file
return scene_path + scene_file_name

def franka_move(start, target, grasp_pose):
def franka_move(start, target, angle):
franka.clear_path = True
start[2] += 0.1
franka.move(env,start,euler=[0,np.radians(180),grasp_pose])
franka.move(env,start,euler=[0,np.radians(180),angle])
start[2] -= 0.07
franka.move(env,start,euler=[0,np.radians(180),grasp_pose])
franka.move(env,start,euler=[0,np.radians(180),angle])

success = False
for toy in toys:
if franka.gripper._proximity_sensor.is_detected(toy):
franka.grasp(env,toy)
success = True
break
if toy is toys[-1]:
raise RuntimeError('can not sense toy')
if not success:
print("Fail to grasp the toy!")
start[2] += 0.07
franka.move(env,start,euler=[0,np.radians(180),grasp_pose])
franka.move(env,start,euler=[0,np.radians(180),angle])
franka.home(env)
a = copy.copy(franka.home_joints)
a[0] += np.pi/2
@@ -42,8 +44,11 @@ def franka_move(start, target, grasp_pose):
franka.home(env)

# TODO: initialize the grasp predictor here; the provided model with weights has 18 classes
# The predictor can accept a custom image size at initialization; remember to resize your image to this shape.
# You can use the default image size.
# predictor = ...


if __name__ == '__main__':
env = Env(scene('Claw_machine.ttt'))
env.start()
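For the predictor TODO above, the completed script toy_completed_new.py added in this commit builds the predictor with 18 output classes (NUM_THETAS*2) and the checkpoint shipped with the task. A minimal sketch along those lines, assuming the same checkpoint path:

# following toy_completed_new.py in this commit
NUM_THETAS = 9
predictor = FCPredictor(NUM_THETAS*2, './checkpoint_softgripper_Network9/Network9-1000-100')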
@@ -62,7 +67,7 @@ def franka_move(start, target, grasp_pose):
box_dest = Shape('box_dest')
target = Shape('Sphere')
toys = [Bird, Hipp, Elephant, Penguin]
dest_position = box_dest.get_position()
place_position = box_dest.get_position()

# randomly exchange the positions of the toys
toy_positions = [toy.get_position() for toy in toys]
@@ -74,31 +79,39 @@ def franka_move(start, target, grasp_pose):
franka.home(env)

# TODO: complete the detection and grasp pipeline in the while loop (a reference sketch follows this file's diff).
end = False
while not end:
# capture image
img = cam.capture_bgr()

# crop the image so that you only feed the region of interest to the neural network
# this will reduce the computation cost of the CNN model
ros = img[41:299,114:372] # (258, 258)
print("=========================Start picking...")
while True:
# TODO: capture an rgb image


# TODO: crop the image so that you only feed the region of interest to the neural network


# TODO: resize your region of interest according to your predictor


# TODO: feed the cropped image to the predictor and obtain the best grasping pixel x, y and rotation angle

depth_image = cam.capture_depth(in_meters=True)
ros = cv2.resize(ros, (1280, 720), interpolation=cv2.INTER_CUBIC)
y_, p_best, grasp_pose = predictor.run(ros)
x,y,angle = grasp_pose
possi = p_best.max()
print('possibility:',possi)

# TODO: compute the grasping pixel in the original image cx, cy and the success probability


# We add a criterion to stop the simulation if the predictor fails to find
# any good grasp with success probability > 0.8
if possi < 0.8:
print("Fail to find good grasp, ending the simulation")
break
cx,cy = int(x*258/1280+114),int(y*258/720+41) # u:cy, v:cx
# print(x,y,cx,cy)
real_position = (cam.H@cam.uv2XYZ(depth_image,cx,cy))[0:3]
real_position[2] = 1.123
target.set_position(real_position)

# TODO: transform u, v, z to x, y, z; you might need to set z to 1.123 manually for successful path planning.


# visualize the grasping point in the simulation
target.set_position(grasp_position)
cv2.circle(img,(cx,cy),5,(0,0,255),5)
cv2.circle(ros,(x,y),5,(0,0,255),5)
franka_move(real_position, dest_position, grasp_pose[2])

# move the robot to execute the grasp
franka_move(grasp_position, place_position, angle)

env.stop()
env.shutdown()
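For the TODO steps inside the while loop above, a possible sketch is the pipeline used by toy_completed_new.py later in this commit; the crop window, resize shape, and helper calls (cam.capture_bgr, cam.capture_depth, cam.uv2XYZ, cam.H, predictor.run) are taken from that file, with variable names adapted to this skeleton (grasp_position instead of real_position):

# capture the image and crop the 258x258 region of interest
img = cam.capture_bgr()
roi = img[41:299, 114:372]

# resize the crop to the predictor's input size and run the network
roi = cv2.resize(roi, (1280, 720), interpolation=cv2.INTER_CUBIC)
y_, p_best, grasp_pose = predictor.run(roi)
x, y, angle = grasp_pose
possi = p_best.max()  # success probability of the best grasp

# map the predicted pixel back into the original image
cx, cy = int(x*258/1280 + 114), int(y*258/720 + 41)

# back-project u, v plus depth to a world position; z is fixed at 1.123 for path planning
depth_image = cam.capture_depth(in_meters=True)
grasp_position = (cam.H @ cam.uv2XYZ(depth_image, cx, cy))[0:3]
grasp_position[2] = 1.123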
6 changes: 3 additions & 3 deletions Simulation/tasks/Claw_machine/toy_completed.py
100644 → 100755
@@ -41,7 +41,7 @@ def franka_move(start, target, grasp_pose):
franka.home(env)

NUM_THETAS = 9
predictor = FCPredictor(NUM_THETAS*2, './net9/Network9-1000-100')
predictor = FCPredictor(NUM_THETAS*2, './checkpoint_softgripper_Network9/Network9-1000-100')

if __name__ == '__main__':
env = Env(scene('Claw_machine.ttt'))
@@ -61,7 +61,7 @@ def franka_move(start, target, grasp_pose):
box_dest = Shape('box_dest')
target = Shape('Sphere')
toys = [Bird, Hipp, Elephant, Penguin]
dest_position = box_dest.get_position()
place_position = box_dest.get_position()

# randomly exchange the positions of the toys
toy_positions = [toy.get_position() for toy in toys]
@@ -91,7 +91,7 @@ def franka_move(start, target, grasp_pose):
target.set_position(real_position)
cv2.circle(img,(cx,cy),5,(0,0,255),5)
cv2.circle(ros,(x,y),5,(0,0,255),5)
franka_move(real_position, dest_position, grasp_pose[2])
franka_move(real_position, place_position, grasp_pose[2])

env.stop()
env.shutdown()
102 changes: 102 additions & 0 deletions Simulation/tasks/Claw_machine/toy_completed_new.py
@@ -0,0 +1,102 @@
from os.path import dirname, abspath
from os import system, environ
sim_path = dirname(dirname(dirname(dirname(abspath(__file__)))))
scene_path = sim_path + '/Simulation/scene/'
import sys
sys.path.append(sim_path)
from Simulation.src.camera import Camera
from Simulation.src.env import Env
from Simulation.src.franka import Franka
from pyrep.objects.shape import Shape
import numpy as np
import cv2
import copy
environ['TF_CPP_MIN_LOG_LEVEL'] = "3" # suppress TensorFlow warning output
from DeepClaw.modules.end2end.graspNet.fc_predictor import FCPredictor

def scene(scene_file_name):
# return abs dir of scene file
return scene_path + scene_file_name

def franka_move(start, target, grasp_pose):
franka.clear_path = True
start[2] += 0.1
franka.move(env,start,euler=[0,np.radians(180),grasp_pose])
start[2] -= 0.07
franka.move(env,start,euler=[0,np.radians(180),grasp_pose])

success = False
for toy in toys:
if franka.gripper._proximity_sensor.is_detected(toy):
franka.grasp(env,toy)
success = True
break
if not success:
print("Fail to grasp the toy!")
start[2] += 0.07
franka.move(env,start,euler=[0,np.radians(180),grasp_pose])
franka.home(env)
a = copy.copy(franka.home_joints)
a[0] += np.pi/2
franka.move_j(a,env)
franka.release(env)
franka.home(env)

NUM_THETAS = 9
predictor = FCPredictor(NUM_THETAS*2, './checkpoint_softgripper_Network9/Network9-1000-100')

if __name__ == '__main__':
env = Env(scene('Claw_machine.ttt'))
env.start()

# franka
franka = Franka()

# cam
cam = Camera()

# toys
Bird = Shape('Bird')
Hipp = Shape('Hipp')
Elephant = Shape('Elephant')
Penguin = Shape('Penguin')
box_dest = Shape('box_dest')
target = Shape('Sphere')
toys = [Bird, Hipp, Elephant, Penguin]
place_position = box_dest.get_position()

# randomly exchange the positions of the toys
toy_positions = [toy.get_position() for toy in toys]
arr = np.arange(len(toys))
np.random.shuffle(arr)
[toys[i].set_position(toy_positions[arr[i]]) for i in range(len(toys))]

# set franka to home joints
franka.home(env)

end = False
print("=========================Start picking...")
while not end:
img = cam.capture_bgr()
ros = img[41:299,114:372] # (258, 258)
depth_image = cam.capture_depth(in_meters=True)
ros = cv2.resize(ros, (1280, 720), interpolation=cv2.INTER_CUBIC)
y_, p_best, grasp_pose = predictor.run(ros)
x,y,angle = grasp_pose
possi = p_best.max()
print('Success probability:',possi)
if possi < 0.8:
print("Fail to find good grasp, ending the simulation")
break
cx,cy = int(x*258/1280+114),int(y*258/720+41) # u:cy, v:cx
# print(x,y,cx,cy)
real_position = (cam.H@cam.uv2XYZ(depth_image,cx,cy))[0:3]
real_position[2] = 1.123
target.set_position(real_position)
cv2.circle(img,(cx,cy),5,(0,0,255),5)
cv2.circle(ros,(x,y),5,(0,0,255),5)
franka_move(real_position, place_position, grasp_pose[2])

env.stop()
env.shutdown()
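A side note on the pixel remapping used in both scripts: the predictor sees the 258x258 crop img[41:299, 114:372] upscaled to 1280x720, so a predicted pixel (x, y) is scaled back by 258/1280 and 258/720 and shifted by the crop origin (114, 41) to land in the full camera image. A small sanity check (to_original_pixel is only an illustrative helper, not part of the repository):

def to_original_pixel(x, y, crop_origin=(114, 41), crop_size=258, resized=(1280, 720)):
    # map a pixel from the resized crop back into the full camera image
    cx = int(x * crop_size / resized[0] + crop_origin[0])
    cy = int(y * crop_size / resized[1] + crop_origin[1])
    return cx, cy

# the centre of the resized crop maps back to the centre of the region of interest
assert to_original_pixel(640, 360) == (243, 170)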
