
generate obj files for rendering.
Siwang Li committed Jul 25, 2015
1 parent 7f5c035 commit 92bfd19
Showing 16 changed files with 1,639 additions and 63 deletions.
6 changes: 3 additions & 3 deletions data/dino/two_mprgp_h002_coarse.ini
@@ -9,12 +9,12 @@
 "gravity" : [0.0,0.0,0.0],
 "coll_pen" : 1e7,
 "coll_dectect_depth": 0.0,
-"num_frames" : 100,
+"num_frames" : 500,
 "save_to" : "tempt_two_mprgp_h002_coarse",
 
 "newton_max_iteration":1,
-"newton_tolerance" :1e-5,
-"mprgp_tolerance" :1e-5,
+"newton_tolerance" :1e-6,
+"mprgp_tolerance" :1e-6,
 "mprgp_max_iteration" :1000,
 
 "static_friction" :0.0,
2 changes: 1 addition & 1 deletion data/dino/two_mprgp_h005_coarse.ini
@@ -9,7 +9,7 @@
 "gravity" : [0.0,0.0,0.0],
 "coll_pen" : 1e7,
 "coll_dectect_depth": 0.0,
-"num_frames" : 100,
+"num_frames" : 200,
 "save_to" : "tempt_two_mprgp_h005_coarse",
 
 "newton_max_iteration":1,
4 changes: 2 additions & 2 deletions script/run_dino_comp.py
@@ -9,9 +9,9 @@
 for inif in init_files:
     if not inif.endswith(".ini"):
         continue
-    if inif.find("two_") < 0 or inif.find('mprgp_h01') < 0 or inif.find('coarse') < 0:
+    if inif.find("two_") < 0 or inif.find('mprgp_h') < 0 or inif.find('coarse') < 0:
         continue
-    if inif.find('two_mprgp_h005_fine2') >= 0:
+    if inif.find('two_mprgp_h005_fine1') >= 0:
         continue
     f = init_files_dir + inif + " "
     cmmd = "./bin/release/collision_handle " + f + " > ./tempt/" + inif + ".txt"
4 changes: 2 additions & 2 deletions src/CMakeLists.txt
@@ -21,8 +21,8 @@ TARGET_LINK_LIBRARIES(${APP_NAME}
 )
 
 SET(LIBRARY_NAME mprgp)
-# SET(lib_src ${utility_src} ${commonfile_src} ${collisionhandle_src} ${qp_src})
-SET(lib_src ${qp_src})
+SET(lib_src ${utility_src} ${commonfile_src} ${collisionhandle_src} ${qp_src})
+# SET(lib_src ${qp_src})
 LIST(REMOVE_ITEM lib_src ./CollisionHandle/main.cpp)
 ADD_LIBRARY(${LIBRARY_NAME} ${lib_src})
 
Empty file modified src/Utility/AuxTools.h
100644 → 100755
48 changes: 48 additions & 0 deletions src/Utility/HashedId.cpp
@@ -0,0 +1,48 @@
#include <utility> // std::swap
#include "HashedId.h"
using namespace UTILITY;

// Invalid key: all four ids and the auxiliary index are -1.
HashedId::HashedId(){
  _id[0]=_id[1]=_id[2]=_id[3]=-1;
  _no=-1;
}

// Edge key: the two vertex indices are sorted so that (a,b) and (b,a)
// produce the same key.
HashedId::HashedId(int a,int b,const int& no){
  if(a > b)
    std::swap(a,b);
  _id[0]=-1;
  _id[1]=-1;
  _id[2]=a;
  _id[3]=b;
  _no=no;
}

// Triangle key: the three vertex indices are sorted so that any
// permutation of (a,b,c) produces the same key.
HashedId::HashedId(int a,int b,int c,const int& no){

  if(a > b)std::swap(a,b);
  if(b > c)std::swap(b,c);
  if(a > b)std::swap(a,b);

  _id[0]=a;
  _id[1]=b;
  _id[2]=c;
  _id[3]=-1;

  _no=no;
}

// Grid-cell key: a voxel position together with its edge length.
HashedId::HashedId(const Eigen::Vector3i& pos,const int& edgeLen,const int& no){

  _id[0]=edgeLen;
  _id[1]=pos.x();
  _id[2]=pos.y();
  _id[3]=pos.z();
  _no=no;
}

// Two keys are equal when all four ids match; _no is ignored.
bool HashedId::operator==(const HashedId& other) const{

  return _id[0] == other._id[0] &&
    _id[1] == other._id[1] &&
    _id[2] == other._id[2] &&
    _id[3] == other._id[3];
}
31 changes: 31 additions & 0 deletions src/Utility/HashedId.h
@@ -0,0 +1,31 @@
#ifndef _HASHEDID_H_
#define _HASHEDID_H_

#include <boost/unordered_set.hpp>
#include <boost/unordered_map.hpp>
#include <eigen3/Eigen/Dense>

namespace UTILITY{

  // Order-independent key for hashing mesh primitives: an edge (a,b), a
  // triangle (a,b,c), or a grid cell (position plus edge length). The
  // field _no stores an auxiliary index that takes no part in hashing
  // or equality.
  struct HashedId{
  public:
    HashedId();
    HashedId(int a,int b,const int& no);
    HashedId(int a,int b,int c,const int& no);
    HashedId(const Eigen::Vector3i& pos,const int& edgeLen,const int& no);
    bool operator==(const HashedId& other) const;
    int _id[4];
    int _no;
  };

  // Hash functor for boost::unordered_set/boost::unordered_map keyed on
  // HashedId: sums the hashes of the four id components.
  struct HashedIdHash: public boost::hash<HashedId>{

    std::size_t operator()(const HashedId& i) const{
      const boost::hash<int> h;
      return h(i._id[0])+h(i._id[1])+h(i._id[2])+h(i._id[3]);
    }
  };
}

#endif /* _HASHEDID_H_ */
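Usage note: because the constructors sort their vertex indices and both operator== and HashedIdHash ignore _no, any ordering of the same edge or triangle maps to one key, so a boost::unordered_set of HashedId deduplicates shared primitives. Below is a minimal sketch of how this could support the OBJ generation named in the commit message; the main() function and the face indices are illustrative assumptions, not part of the commit.

// Minimal sketch (assumption, not from the commit): deduplicate triangle
// faces with HashedId before writing OBJ "f" lines. Indices are made up.
#include <cstdio>
#include <boost/unordered_set.hpp>
#include "HashedId.h"

int main(){
  boost::unordered_set<UTILITY::HashedId, UTILITY::HashedIdHash> faces;

  // (1,2,3) and (3,1,2) are the same triangle: their ids sort to the same
  // key, so the second insert is a no-op and only two faces remain.
  faces.insert(UTILITY::HashedId(1,2,3, 0));
  faces.insert(UTILITY::HashedId(3,1,2, 1));
  faces.insert(UTILITY::HashedId(2,3,4, 2));

  for(const UTILITY::HashedId& f : faces) // OBJ indices are 1-based
    std::printf("f %d %d %d\n", f._id[0]+1, f._id[1]+1, f._id[2]+1);
  return 0;
}

The sum-of-hashes combiner in HashedIdHash is order-insensitive, which matches the sorted edge and triangle keys.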

