
Install AnyGrasp

You can follow this blog post for detailed instructions on installing AnyGrasp.

Below is an example script used to deploy AnyGrasp as a server:

anygrasp_sdk/grasp_detection/server.py
import argparse
import os

# Parse the port and GPU index before importing torch, so that
# CUDA_VISIBLE_DEVICES takes effect when CUDA is initialized.
parser = argparse.ArgumentParser()
parser.add_argument("-p", "--port", type=str)
parser.add_argument("-d", "--device", type=str)
args = parser.parse_args()
os.environ["CUDA_VISIBLE_DEVICES"] = args.device
import numpy as np
import open3d as o3d
from flask import Flask, request, jsonify
import torch
import gc
from gsnet import AnyGrasp
from graspnetAPI import GraspGroup
VISUALIZED = False
app = Flask(__name__)
class Configs:
    checkpoint_path = "log/checkpoint_detection.tar"
    max_gripper_width = 0.1
    gripper_height = 0.03
    top_down_grasp = True
    debug = True

cfgs = Configs()
print("Loading AnyGrasp model into memory...")
anygrasp = AnyGrasp(cfgs)
anygrasp.load_net()
print("Model loaded successfully.")
def handle_grasp(data):
    try:
        dense_mode = data.get("dense_mode", False)
        print("dense_mode:", dense_mode)
        colors = np.array(data["colors"])
        depths = np.array(data["depths"])
        fx, fy = float(data["fx"]), float(data["fy"])
        cx, cy = float(data["cx"]), float(data["cy"])
        scale = float(data["scale"])

        intrinsic = o3d.camera.PinholeCameraIntrinsic()
        intrinsic.set_intrinsics(
            width=colors.shape[1], height=colors.shape[0], fx=fx, fy=fy, cx=cx, cy=cy
        )

        # Back-project the depth map into a colored point cloud using the
        # pinhole camera model.
        colors = colors.astype(np.float32) / 255.0
        xmap, ymap = np.arange(depths.shape[1]), np.arange(depths.shape[0])
        xmap, ymap = np.meshgrid(xmap, ymap)
        points_z = depths / scale
        points_x = (xmap - cx) / fx * points_z
        points_y = (ymap - cy) / fy * points_z

        # Keep only points within the depth range of the workspace limits below.
        mask = (points_z > -2.0) & (points_z <= 2.0)
        points = np.stack([points_x, points_y, points_z], axis=-1)
        points = points[mask].astype(np.float32)
        colors = colors[mask].astype(np.float32)

        # Workspace limits passed to AnyGrasp: [xmin, xmax, ymin, ymax, zmin, zmax].
        lims = [-20.0, 20.0, -20.0, 20.0, -2.0, 2.0]
        with torch.no_grad():
            gg, cloud = anygrasp.get_grasp(
                points,
                colors,
                lims=lims,
                apply_object_mask=True,
                dense_grasp=dense_mode,
                collision_detection=True,
            )

        if gg is None or len(gg) == 0:
            return None

        # Remove near-duplicate grasps and rank the rest by score.
        gg = gg.nms().sort_by_score()
        gg_pick = gg[:]  # top-K or all

        if VISUALIZED:
            # Flip the cloud and grippers for a more natural viewing angle.
            trans_mat = np.array([[1, 0, 0, 0],
                                  [0, 1, 0, 0],
                                  [0, 0, -1, 0],
                                  [0, 0, 0, 1]])
            cloud.transform(trans_mat)
            grippers = gg_pick.to_open3d_geometry_list()
            for gripper in grippers:
                gripper.transform(trans_mat)
            o3d.visualization.draw_geometries([*grippers, cloud])

        return gg_pick
    finally:
        # Release GPU memory between requests.
        gc.collect()
        torch.cuda.empty_cache()
@app.route("/process", methods=["POST"])
def process_data():
    try:
        data = request.get_json()
        data["colors"] = np.array(data["colors"], dtype=np.uint8)
        data["depths"] = np.array(data["depths"], dtype=np.uint16)
        result = handle_grasp(data)
        if result is None:
            return jsonify({"message": "No Grasp detected!"}), 400

        # Serialize each grasp pose into plain JSON-friendly types.
        grasp_list = []
        for grasp in result:
            grasp_list.append({
                "translation": grasp.translation.tolist(),
                "rotation_matrix": grasp.rotation_matrix.tolist(),
                "depth": grasp.depth,
                "score": grasp.score,
            })
        return jsonify({"grasp_groups": grasp_list}), 200
    except Exception as e:
        print("Error:", e)
        return jsonify({"error": str(e)}), 500

if __name__ == "__main__":
    app.run(host="0.0.0.0", port=int(args.port), threaded=False)

You can launch the AnyGrasp server with the following command:

python anygrasp_sdk/grasp_detection/server.py -p 5000 -d 0
  • -p specifies the port that the server will listen on.
  • -d specifies which GPU to use.
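
If your machine has several GPUs, you could, for example, start one server instance per GPU by launching the script multiple times with different ports and device indices (the second port below is just an illustrative choice):

python anygrasp_sdk/grasp_detection/server.py -p 5000 -d 0
python anygrasp_sdk/grasp_detection/server.py -p 5001 -d 1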

You can also expose this port on your local network, allowing multiple machines to query the same AnyGrasp server instance simultaneously.
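
Once the server is reachable, you can request grasps from any machine on the network by POSTing an RGB-D frame to the /process endpoint. Below is a minimal client sketch, assuming the server address is 127.0.0.1:5000; the image size, intrinsics, and depth scale are placeholder values that should be replaced with your camera's actual parameters.

import numpy as np
import requests

SERVER_URL = "http://127.0.0.1:5000/process"  # replace with your server's address and port

# Placeholder RGB-D frame: an H x W x 3 uint8 color image and an aligned
# H x W uint16 depth map. Replace these with real camera captures.
colors = np.zeros((720, 1280, 3), dtype=np.uint8)
depths = np.full((720, 1280), 800, dtype=np.uint16)

payload = {
    "colors": colors.tolist(),
    "depths": depths.tolist(),
    "fx": 912.0, "fy": 912.0,   # example intrinsics; use your camera's calibration
    "cx": 640.0, "cy": 360.0,
    "scale": 1000.0,            # depth units per meter (1000 for millimeter depth)
    "dense_mode": False,
}

response = requests.post(SERVER_URL, json=payload)
if response.status_code == 200:
    grasps = response.json()["grasp_groups"]
    best = grasps[0]            # grasps are sorted by score on the server
    print(f"Received {len(grasps)} grasps; best score {best['score']:.3f}")
else:
    print("Request failed:", response.status_code, response.json())

Sending the images as nested JSON lists matches what server.py expects, but it produces large request bodies; if you need higher throughput, consider encoding the arrays more compactly and adjusting the server to match.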

You can edit the default section in configs/miscs/anygrasp.yml to point the configuration at your server's address and port.